You are viewing a plain text version of this content. The canonical link for it is here.
Posted to oak-commits@jackrabbit.apache.org by mr...@apache.org on 2015/08/25 08:26:34 UTC

svn commit: r1697567 - in /jackrabbit/oak/branches/1.2: ./ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/

Author: mreutegg
Date: Tue Aug 25 06:26:33 2015
New Revision: 1697567

URL: http://svn.apache.org/r1697567
Log:
OAK-3259: Optimize NodeDocument.getNewestRevision()

Merged revisions 1697373,1697410 from trunk

Modified:
    jackrabbit/oak/branches/1.2/   (props changed)
    jackrabbit/oak/branches/1.2/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
    jackrabbit/oak/branches/1.2/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitOperations.java
    jackrabbit/oak/branches/1.2/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentSplitTest.java
    jackrabbit/oak/branches/1.2/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/NodeDocumentTest.java

Propchange: jackrabbit/oak/branches/1.2/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Aug 25 06:26:33 2015
@@ -1,3 +1,3 @@
 /jackrabbit/oak/branches/1.0:1665962
-/jackrabbit/oak/trunk:1672350,1672468,1672537,1672603,1672642,1672644,1672834-1672835,1673351,1673410,1673414-1673415,1673436,1673644,1673662-1673664,1673669,1673695,1673713,1673738,1673787,1673791,1674046,1674065,1674075,1674107,1674228,1674780,1674880,1675054-1675055,1675319,1675332,1675354,1675357,1675382,1675555,1675566,1675593,1676198,1676237,1676407,1676458,1676539,1676670,1676693,1676703,1676725,1677579,1677581,1677609,1677611,1677774,1677788,1677797,1677804,1677806,1677939,1677991,1678023,1678095-1678096,1678124,1678171,1678173,1678211,1678323,1678758,1678938,1678954,1679144,1679165,1679191,1679232,1679235,1679503,1679958,1679961,1680170,1680172,1680182,1680222,1680232,1680236,1680461,1680633,1680643,1680747,1680805-1680806,1680903,1681282,1681767,1681918,1681955,1682042,1682218,1682235,1682437,1682494,1682555,1682855,1682904,1683059,1683089,1683213,1683249,1683259,1683278,1683323,1683687,1683700,1684174-1684175,1684186,1684376,1684442,1684561,1684570,1684601,1684618,1684820
 ,1684868,1685023,1685075,1685370,1685552,1685589-1685590,1685840,1685964,1685977,1685989,1685999,1686023,1686032,1686097,1686162,1686229,1686234,1686253,1686414,1686780,1686854,1686857,1686971,1687053-1687055,1687175,1687196,1687198,1687220,1687239-1687240,1687301,1687441,1687553,1688089-1688090,1688172,1688179,1688349,1688421,1688436,1688453,1688616,1688622,1688634,1688636,1688817,1689003-1689004,1689008,1689577,1689581,1689623,1689810,1689828,1689831,1689833,1689903,1690017,1690043,1690047,1690057,1690247,1690249,1690634-1690637,1690650,1690669,1690674,1690885,1690941,1691139,1691151,1691159,1691167,1691183,1691188,1691210,1691280,1691307,1691331-1691333,1691345,1691384-1691385,1691401,1691509,1692133-1692134,1692156,1692250,1692274,1692363,1692382,1692478,1692955,1693002,1693030,1693209,1693421,1693525-1693526,1694007,1694393-1694394,1695050,1695122,1695280,1695299,1695457,1695482,1695507,1695521,1695540,1696194,1696242,1696285,1696578,1696759,1696916
+/jackrabbit/oak/trunk:1672350,1672468,1672537,1672603,1672642,1672644,1672834-1672835,1673351,1673410,1673414-1673415,1673436,1673644,1673662-1673664,1673669,1673695,1673713,1673738,1673787,1673791,1674046,1674065,1674075,1674107,1674228,1674780,1674880,1675054-1675055,1675319,1675332,1675354,1675357,1675382,1675555,1675566,1675593,1676198,1676237,1676407,1676458,1676539,1676670,1676693,1676703,1676725,1677579,1677581,1677609,1677611,1677774,1677788,1677797,1677804,1677806,1677939,1677991,1678023,1678095-1678096,1678124,1678171,1678173,1678211,1678323,1678758,1678938,1678954,1679144,1679165,1679191,1679232,1679235,1679503,1679958,1679961,1680170,1680172,1680182,1680222,1680232,1680236,1680461,1680633,1680643,1680747,1680805-1680806,1680903,1681282,1681767,1681918,1681955,1682042,1682218,1682235,1682437,1682494,1682555,1682855,1682904,1683059,1683089,1683213,1683249,1683259,1683278,1683323,1683687,1683700,1684174-1684175,1684186,1684376,1684442,1684561,1684570,1684601,1684618,1684820
 ,1684868,1685023,1685075,1685370,1685552,1685589-1685590,1685840,1685964,1685977,1685989,1685999,1686023,1686032,1686097,1686162,1686229,1686234,1686253,1686414,1686780,1686854,1686857,1686971,1687053-1687055,1687175,1687196,1687198,1687220,1687239-1687240,1687301,1687441,1687553,1688089-1688090,1688172,1688179,1688349,1688421,1688436,1688453,1688616,1688622,1688634,1688636,1688817,1689003-1689004,1689008,1689577,1689581,1689623,1689810,1689828,1689831,1689833,1689903,1690017,1690043,1690047,1690057,1690247,1690249,1690634-1690637,1690650,1690669,1690674,1690885,1690941,1691139,1691151,1691159,1691167,1691183,1691188,1691210,1691280,1691307,1691331-1691333,1691345,1691384-1691385,1691401,1691509,1692133-1692134,1692156,1692250,1692274,1692363,1692382,1692478,1692955,1693002,1693030,1693209,1693421,1693525-1693526,1694007,1694393-1694394,1695050,1695122,1695280,1695299,1695457,1695482,1695507,1695521,1695540,1696194,1696242,1696285,1696578,1696759,1696916,1697373,1697410
 /jackrabbit/trunk:1345480

Modified: jackrabbit/oak/branches/1.2/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java?rev=1697567&r1=1697566&r2=1697567&view=diff
==============================================================================
--- jackrabbit/oak/branches/1.2/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java (original)
+++ jackrabbit/oak/branches/1.2/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java Tue Aug 25 06:26:33 2015
@@ -24,6 +24,7 @@ import java.util.Map.Entry;
 import java.util.NavigableMap;
 import java.util.Queue;
 import java.util.SortedMap;
+import java.util.SortedSet;
 import java.util.TreeMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
@@ -51,11 +52,13 @@ import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.collect.Iterables.filter;
 import static com.google.common.collect.Iterables.transform;
 import static org.apache.jackrabbit.oak.plugins.document.Collection.NODES;
+import static org.apache.jackrabbit.oak.plugins.document.StableRevisionComparator.REVERSE;
 import static org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key;
 import static org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation;
 import static org.apache.jackrabbit.oak.plugins.document.util.Utils.isRevisionNewer;
@@ -711,9 +714,7 @@ public final class NodeDocument extends
                                 "_revisions {}, _commitRoot {}",
                         changeRev, getId(), getLocalRevisions(), getLocalCommitRoot());
             }
-            it = filter(Iterables.mergeSorted(
-                    ImmutableList.of(getValueMap(REVISIONS).keySet(), getValueMap(COMMIT_ROOT).keySet()),
-                    revisions.comparator()), predicate).iterator();
+            it = filter(getAllChanges(), predicate).iterator();
             if (it.hasNext()) {
                 newestRev = it.next();
             }
@@ -1059,7 +1060,7 @@ public final class NodeDocument extends
     @Nonnull
     public Iterable<UpdateOp> split(@Nonnull RevisionContext context,
                                     @Nonnull Revision head) {
-        return SplitOperations.forDocument(this, context, head);
+        return SplitOperations.forDocument(this, context, head, NUM_REVS_THRESHOLD);
     }
 
     /**
@@ -1113,8 +1114,8 @@ public final class NodeDocument extends
             if (!includeStale) {
                 stale = getLocalMap(STALE_PREV);
             }
-            NavigableMap<Revision, Range> transformed = new TreeMap<Revision, Range>(
-                    StableRevisionComparator.REVERSE);
+            NavigableMap<Revision, Range> transformed =
+                    new TreeMap<Revision, Range>(REVERSE);
             for (Map.Entry<Revision, String> entry : map.entrySet()) {
                 Range r = Range.fromEntry(entry.getKey(), entry.getValue());
                 if (String.valueOf(r.height).equals(stale.get(r.high))) {
@@ -1186,6 +1187,7 @@ public final class NodeDocument extends
     NodeDocument getPreviousDocument(String prevId){
         //Use the maxAge variant such that in case of Mongo call for
         //previous doc are directed towards replicas first
+        LOG.trace("get previous document {}", prevId);
         return store.find(Collection.NODES, prevId, Integer.MAX_VALUE);
     }
 
@@ -1215,6 +1217,53 @@ public final class NodeDocument extends
         };
     }
 
+    /**
+     * Returns previous leaf documents. Those are the previous documents with
+     * a type {@code !=} {@link SplitDocType#INTERMEDIATE}. The documents are
+     * returned in descending order based on the most recent change recorded
+     * in the previous document. A change is defined as an entry in either the
+     * {@link #REVISIONS} or {@link #COMMIT_ROOT} map.
+     *
+     * @return the leaf documents in descending order.
+     */
+    @Nonnull
+    Iterator<NodeDocument> getPreviousDocLeaves() {
+        if (getPreviousRanges().isEmpty()) {
+            return Iterators.emptyIterator();
+        }
+        // create a mutable copy
+        final NavigableMap<Revision, Range> ranges = Maps.newTreeMap(getPreviousRanges());
+        return new AbstractIterator<NodeDocument>() {
+            @Override
+            protected NodeDocument computeNext() {
+                NodeDocument next;
+                for (;;) {
+                    Map.Entry<Revision, Range> topEntry = ranges.pollFirstEntry();
+                    if (topEntry == null) {
+                        // no more ranges
+                        next = endOfData();
+                        break;
+                    }
+                    NodeDocument prev = getPreviousDoc(topEntry.getKey(), topEntry.getValue());
+                    if (prev == null) {
+                        // move on to next range
+                        continue;
+                    }
+                    if (topEntry.getValue().getHeight() == 0) {
+                        // this is a leaf
+                        next = prev;
+                        break;
+                    } else {
+                        // replace intermediate entry with its previous ranges
+                        ranges.putAll(prev.getPreviousRanges());
+                    }
+                }
+                return next;
+            }
+        };
+    }
+
+    @CheckForNull
     private NodeDocument getPreviousDoc(Revision rev, Range range){
         int h = range.height;
         String prevId = Utils.getPreviousIdFor(getMainPath(), rev, h);
@@ -1262,6 +1311,96 @@ public final class NodeDocument extends
     }
 
     /**
+     * Returns an {@link Iterable} of {@link Revision} of all changes performed
+     * on this document. This covers all entries for {@link #REVISIONS} and
+     * {@link #COMMIT_ROOT} including previous documents. The revisions are
+     * returned in descending stable revision order using
+     * {@link StableRevisionComparator#REVERSE}.
+     *
+     * @return revisions of all changes performed on this document.
+     */
+    Iterable<Revision> getAllChanges() {
+        final SortedSet<Revision> stack = Sets.newTreeSet(REVERSE);
+        // initialize with local revisions and commitRoot entries
+        stack.addAll(getLocalCommitRoot().keySet());
+        stack.addAll(getLocalRevisions().keySet());
+        if (getPreviousRanges().isEmpty()) {
+            return stack;
+        }
+        return new Iterable<Revision>() {
+            @Override
+            public Iterator<Revision> iterator() {
+                final Iterator<NodeDocument> previousDocs = getPreviousDocLeaves();
+                return new AbstractIterator<Revision>() {
+                    private NodeDocument nextDoc;
+                    private Revision nextRevision;
+                    @Override
+                    protected Revision computeNext() {
+                        if (stack.isEmpty()) {
+                            return endOfData();
+                        }
+                        Revision next = stack.first();
+                        stack.remove(next);
+                        fillStackIfNeeded();
+                        return next;
+                    }
+
+                    private void fillStackIfNeeded() {
+                        for (;;) {
+                            fetchNextDoc();
+
+                            // no more changes to compare with
+                            if (nextDoc == null) {
+                                return;
+                            }
+
+                            // check if current top revision is still newer than
+                            // most recent revision of next document
+                            if (!stack.isEmpty()) {
+                                Revision top = stack.first();
+                                if (top.compareRevisionTimeThenClusterId(nextRevision) > 0) {
+                                    return;
+                                }
+                            }
+
+                            // if we get here, we need to pull in changes
+                            // from nextDoc
+                            Iterables.addAll(stack, nextDoc.getAllChanges());
+                            nextDoc = null;
+                            nextRevision = null;
+                        }
+                    }
+
+                    /**
+                     * Fetch the next document if {@code nextDoc} is
+                     * {@code null} and there are more documents.
+                     */
+                    private void fetchNextDoc() {
+                        for (;;) {
+                            if (nextDoc != null) {
+                                break;
+                            }
+                            if (!previousDocs.hasNext()) {
+                                // no more previous docs
+                                break;
+                            }
+                            nextDoc = previousDocs.next();
+                            Iterator<Revision> changes = nextDoc.getAllChanges().iterator();
+                            if (changes.hasNext()) {
+                                nextRevision = changes.next();
+                                break;
+                            } else {
+                                // empty document, try next
+                                nextDoc = null;
+                            }
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /**
      * Returns the local value map for the given key.
      *
      * @param key the key.
@@ -1759,7 +1898,7 @@ public final class NodeDocument extends
         case JsopReader.STRING:
             return json.getToken();
         case '{':
-            TreeMap<Revision, Object> map = new TreeMap<Revision, Object>(StableRevisionComparator.REVERSE);
+            TreeMap<Revision, Object> map = new TreeMap<Revision, Object>(REVERSE);
             while (true) {
                 if (json.matches('}')) {
                     break;

Modified: jackrabbit/oak/branches/1.2/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitOperations.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitOperations.java?rev=1697567&r1=1697566&r2=1697567&view=diff
==============================================================================
--- jackrabbit/oak/branches/1.2/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitOperations.java (original)
+++ jackrabbit/oak/branches/1.2/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/SplitOperations.java Tue Aug 25 06:26:33 2015
@@ -44,7 +44,6 @@ import static com.google.common.base.Pre
 import static com.google.common.collect.Sets.filter;
 import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.COMMIT_ROOT;
 import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.DOC_SIZE_THRESHOLD;
-import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.NUM_REVS_THRESHOLD;
 import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.PREV_SPLIT_FACTOR;
 import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.REVISIONS;
 import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.SPLIT_RATIO;
@@ -72,6 +71,7 @@ class SplitOperations {
     private final String id;
     private final Revision headRevision;
     private final RevisionContext context;
+    private final int numRevsThreshold;
     private Revision high;
     private Revision low;
     private int numValues;
@@ -86,12 +86,14 @@ class SplitOperations {
 
     private SplitOperations(@Nonnull NodeDocument doc,
                             @Nonnull RevisionContext context,
-                            @Nonnull Revision headRevision) {
+                            @Nonnull Revision headRevision,
+                            int numRevsThreshold) {
         this.doc = checkNotNull(doc);
         this.context = checkNotNull(context);
         this.path = doc.getPath();
         this.id = doc.getId();
         this.headRevision = checkNotNull(headRevision);
+        this.numRevsThreshold = numRevsThreshold;
     }
 
     /**
@@ -106,6 +108,7 @@ class SplitOperations {
      * @param context the revision context.
      * @param headRevision the head revision before the document was retrieved
      *                     from the document store.
+     * @param numRevsThreshold the minimum number of revisions required before a split is performed.
      * @return list of update operations. An empty list indicates the document
      *          does not require a split.
      * @throws IllegalArgumentException if the given document is a split
@@ -114,12 +117,13 @@ class SplitOperations {
     @Nonnull
     static List<UpdateOp> forDocument(@Nonnull NodeDocument doc,
                                       @Nonnull RevisionContext context,
-                                      @Nonnull Revision headRevision) {
+                                      @Nonnull Revision headRevision,
+                                      int numRevsThreshold) {
         if (doc.isSplitDocument()) {
             throw new IllegalArgumentException(
                     "Not a main document: " + doc.getId());
         }
-        return new SplitOperations(doc, context, headRevision).create();
+        return new SplitOperations(doc, context, headRevision, numRevsThreshold).create();
 
     }
 
@@ -168,7 +172,7 @@ class SplitOperations {
         SortedMap<Revision, Range> previous = doc.getPreviousRanges();
         // only consider if there are enough commits,
         // unless document is really big
-        return doc.getLocalRevisions().size() + doc.getLocalCommitRoot().size() > NUM_REVS_THRESHOLD
+        return doc.getLocalRevisions().size() + doc.getLocalCommitRoot().size() > numRevsThreshold
                 || doc.getMemory() >= DOC_SIZE_THRESHOLD
                 || previous.size() >= PREV_SPLIT_FACTOR
                 || !doc.getStalePrev().isEmpty();
@@ -302,7 +306,7 @@ class SplitOperations {
         UpdateOp main = null;
         // check if we have enough data to split off
         if (high != null && low != null
-                && (numValues >= NUM_REVS_THRESHOLD
+                && (numValues >= numRevsThreshold
                 || doc.getMemory() > DOC_SIZE_THRESHOLD)) {
             // enough changes to split off
             // move to another document
@@ -336,7 +340,7 @@ class SplitOperations {
             setSplitDocProps(doc, oldDoc, old, high);
             // only split if enough of the data can be moved to old document
             if (oldDoc.getMemory() > doc.getMemory() * SPLIT_RATIO
-                    || numValues >= NUM_REVS_THRESHOLD) {
+                    || numValues >= numRevsThreshold) {
                 splitOps.add(old);
             } else {
                 main = null;

Modified: jackrabbit/oak/branches/1.2/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentSplitTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentSplitTest.java?rev=1697567&r1=1697566&r2=1697567&view=diff
==============================================================================
--- jackrabbit/oak/branches/1.2/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentSplitTest.java (original)
+++ jackrabbit/oak/branches/1.2/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentSplitTest.java Tue Aug 25 06:26:33 2015
@@ -670,7 +670,7 @@ public class DocumentSplitTest extends B
         doc.put(NodeDocument.ID, Utils.getIdFromPath("/test"));
         doc.put(NodeDocument.SD_TYPE, NodeDocument.SplitDocType.DEFAULT.type);
         Revision head = mk.getNodeStore().getHeadRevision();
-        SplitOperations.forDocument(doc, DummyRevisionContext.INSTANCE, head);
+        SplitOperations.forDocument(doc, DummyRevisionContext.INSTANCE, head, NUM_REVS_THRESHOLD);
     }
 
     @Test
@@ -779,7 +779,7 @@ public class DocumentSplitTest extends B
             for (String id : ns.getSplitCandidates()) {
                 Revision head = ns.getHeadRevision();
                 NodeDocument doc = store.find(NODES, id);
-                List<UpdateOp> ops = SplitOperations.forDocument(doc, rc, head);
+                List<UpdateOp> ops = SplitOperations.forDocument(doc, rc, head, NUM_REVS_THRESHOLD);
                 Set<Revision> removed = Sets.newHashSet();
                 Set<Revision> added = Sets.newHashSet();
                 for (UpdateOp op : ops) {

Modified: jackrabbit/oak/branches/1.2/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/NodeDocumentTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.2/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/NodeDocumentTest.java?rev=1697567&r1=1697566&r2=1697567&view=diff
==============================================================================
--- jackrabbit/oak/branches/1.2/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/NodeDocumentTest.java (original)
+++ jackrabbit/oak/branches/1.2/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/NodeDocumentTest.java Tue Aug 25 06:26:33 2015
@@ -16,14 +16,32 @@
  */
 package org.apache.jackrabbit.oak.plugins.document;
 
+import java.util.Collections;
 import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Random;
+import java.util.Set;
 
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Iterators;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+import org.apache.jackrabbit.oak.api.CommitFailedException;
+import org.apache.jackrabbit.oak.plugins.document.VersionGarbageCollector.VersionGCStats;
 import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore;
 import org.apache.jackrabbit.oak.plugins.document.util.Utils;
+import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
+import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.jackrabbit.oak.spi.state.NodeStore;
 import org.junit.Test;
 
+import static org.apache.jackrabbit.oak.plugins.document.Collection.NODES;
 import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.revisionAreAmbiguous;
 import static org.apache.jackrabbit.oak.plugins.document.Revision.RevisionComparator;
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
@@ -85,4 +103,270 @@ public class NodeDocumentTest {
         assertTrue(revisionAreAmbiguous(context, r1, r2));
         assertTrue(revisionAreAmbiguous(context, r2, r1));
     }
+
+    @Test
+    public void getAllChanges() throws Exception {
+        final int NUM_CHANGES = 200;
+        DocumentNodeStore ns = createTestStore(NUM_CHANGES);
+        Revision previous = ns.newRevision();
+        NodeDocument root = getRootDocument(ns.getDocumentStore());
+        for (Revision r : root.getAllChanges()) {
+            assertTrue(previous.compareRevisionTime(r) > 0);
+            previous = r;
+        }
+        // NUM_CHANGES + one revision when node was created
+        assertEquals(NUM_CHANGES + 1, Iterables.size(root.getAllChanges()));
+        ns.dispose();
+    }
+
+    @Test
+    public void getAllChangesAfterGC1() throws Exception {
+        int numChanges = 200;
+        DocumentNodeStore ns = createTestStore(numChanges);
+        NodeDocument root = getRootDocument(ns.getDocumentStore());
+        // remove most recent previous doc
+        NodeDocument toRemove = root.getAllPreviousDocs().next();
+        int numDeleted = new SplitDocumentCleanUp(ns.store, new VersionGCStats(),
+                Collections.singleton(toRemove)).disconnect().deleteSplitDocuments();
+        assertEquals(1, numDeleted);
+        numChanges -= Iterables.size(toRemove.getAllChanges());
+
+        root = getRootDocument(ns.getDocumentStore());
+        Revision previous = ns.newRevision();
+        for (Revision r : root.getAllChanges()) {
+            assertTrue(previous.compareRevisionTime(r) > 0);
+            previous = r;
+        }
+        // numChanges + one revision when node was created
+        assertEquals(numChanges + 1, Iterables.size(root.getAllChanges()));
+        ns.dispose();
+    }
+
+    @Test
+    public void getAllChangesAfterGC2() throws Exception {
+        int numChanges = 200;
+        DocumentNodeStore ns = createTestStore(numChanges);
+        NodeDocument root = getRootDocument(ns.getDocumentStore());
+        // remove oldest previous doc
+        NodeDocument toRemove = Iterators.getLast(root.getAllPreviousDocs());
+        int numDeleted = new SplitDocumentCleanUp(ns.store, new VersionGCStats(),
+                Collections.singleton(toRemove)).disconnect().deleteSplitDocuments();
+        assertEquals(1, numDeleted);
+        numChanges -= Iterables.size(toRemove.getAllChanges());
+
+        root = getRootDocument(ns.getDocumentStore());
+        Revision previous = ns.newRevision();
+        for (Revision r : root.getAllChanges()) {
+            assertTrue(previous.compareRevisionTime(r) > 0);
+            previous = r;
+        }
+        // numChanges + one revision when node was created
+        assertEquals(numChanges + 1, Iterables.size(root.getAllChanges()));
+        ns.dispose();
+    }
+
+    @Test
+    public void getAllChangesCluster() throws Exception {
+        final int NUM_CLUSTER_NODES = 3;
+        final int NUM_CHANGES = 500;
+        DocumentStore store = new MemoryDocumentStore();
+        List<DocumentNodeStore> docStores = Lists.newArrayList();
+        for (int i = 0; i < NUM_CLUSTER_NODES; i++) {
+            DocumentNodeStore ns = new DocumentMK.Builder()
+                    .setDocumentStore(store)
+                    .setAsyncDelay(0).getNodeStore();
+            docStores.add(ns);
+        }
+        Random r = new Random(42);
+        for (int i = 0; i < NUM_CHANGES; i++) {
+            // randomly pick a clusterNode
+            int clusterIdx = r.nextInt(NUM_CLUSTER_NODES);
+            DocumentNodeStore ns = docStores.get(clusterIdx);
+            NodeBuilder builder = ns.getRoot().builder();
+            builder.setProperty("p-" + clusterIdx, i);
+            merge(ns, builder);
+            if (r.nextFloat() < 0.2) {
+                Revision head = ns.getHeadRevision();
+                for (UpdateOp op : SplitOperations.forDocument(
+                        getRootDocument(store), ns, head, 2)) {
+                    store.createOrUpdate(NODES, op);
+                }
+            }
+        }
+        DocumentNodeStore ns = docStores.get(0);
+        NodeDocument root = getRootDocument(ns.getDocumentStore());
+        Revision previous = ns.newRevision();
+        for (Revision rev : root.getAllChanges()) {
+            assertTrue(previous.compareRevisionTimeThenClusterId(rev) > 0);
+            previous = rev;
+        }
+        // numChanges + one revision when node was created
+        assertEquals(NUM_CHANGES + 1, Iterables.size(root.getAllChanges()));
+
+        for (DocumentNodeStore dns : docStores) {
+            dns.dispose();
+        }
+    }
+
+    @Test
+    public void getPreviousDocLeaves() throws Exception {
+        DocumentNodeStore ns = createTestStore(200);
+        Revision previous = ns.newRevision();
+        NodeDocument root = getRootDocument(ns.getDocumentStore());
+        Iterator<NodeDocument> it = root.getPreviousDocLeaves();
+        while (it.hasNext()) {
+            NodeDocument leaf = it.next();
+            Revision r = leaf.getAllChanges().iterator().next();
+            assertTrue(previous.compareRevisionTime(r) > 0);
+            previous = r;
+        }
+        ns.dispose();
+    }
+
+    @Test
+    public void getPreviousDocLeavesAfterGC1() throws Exception {
+        DocumentNodeStore ns = createTestStore(200);
+        Revision previous = ns.newRevision();
+        NodeDocument root = getRootDocument(ns.getDocumentStore());
+        int numLeaves = Iterators.size(root.getPreviousDocLeaves());
+        // remove most recent previous doc
+        NodeDocument toRemove = root.getAllPreviousDocs().next();
+        int numDeleted = new SplitDocumentCleanUp(ns.store, new VersionGCStats(),
+                Collections.singleton(toRemove)).disconnect().deleteSplitDocuments();
+        assertEquals(1, numDeleted);
+
+        root = getRootDocument(ns.getDocumentStore());
+        assertEquals(numLeaves - 1, Iterators.size(root.getPreviousDocLeaves()));
+        Iterator<NodeDocument> it = root.getPreviousDocLeaves();
+        while (it.hasNext()) {
+            NodeDocument leaf = it.next();
+            Revision r = leaf.getAllChanges().iterator().next();
+            assertTrue(previous.compareRevisionTime(r) > 0);
+            previous = r;
+        }
+        ns.dispose();
+    }
+
+    @Test
+    public void getPreviousDocLeavesAfterGC2() throws Exception {
+        DocumentNodeStore ns = createTestStore(200);
+        Revision previous = ns.newRevision();
+        NodeDocument root = getRootDocument(ns.getDocumentStore());
+        int numLeaves = Iterators.size(root.getPreviousDocLeaves());
+        // remove oldest previous doc
+        NodeDocument toRemove = Iterators.getLast(root.getAllPreviousDocs());
+        int numDeleted = new SplitDocumentCleanUp(ns.store, new VersionGCStats(),
+                Collections.singleton(toRemove)).disconnect().deleteSplitDocuments();
+        assertEquals(1, numDeleted);
+
+        root = getRootDocument(ns.getDocumentStore());
+        assertEquals(numLeaves - 1, Iterators.size(root.getPreviousDocLeaves()));
+        Iterator<NodeDocument> it = root.getPreviousDocLeaves();
+        while (it.hasNext()) {
+            NodeDocument leaf = it.next();
+            Revision r = leaf.getAllChanges().iterator().next();
+            assertTrue(previous.compareRevisionTime(r) > 0);
+            previous = r;
+        }
+        ns.dispose();
+    }
+
+    /**
+     * OAK-3259: NodeDocument.getNewestRevision() must not read through
+     * the entire chain of previous documents. The DocumentStore is
+     * instrumented to record find() calls for previous documents, and
+     * the test asserts only a small number of them are fetched.
+     */
+    @Test
+    public void getNewestRevisionTooExpensive() throws Exception {
+        final int NUM_CHANGES = 200;
+        // ids of previous documents fetched through find()
+        final Set<String> prevDocCalls = Sets.newHashSet();
+        DocumentStore store = new MemoryDocumentStore() {
+            @Override
+            public <T extends Document> T find(Collection<T> collection,
+                                               String key) {
+                // record lookups of previous documents; assumes their
+                // ids map to paths starting with "p" — matches the
+                // split-document id scheme used elsewhere in this test
+                if (Utils.getPathFromId(key).startsWith("p")) {
+                    prevDocCalls.add(key);
+                }
+                return super.find(collection, key);
+            }
+        };
+        DocumentNodeStore ns = new DocumentMK.Builder()
+                .setDocumentStore(store)
+                .setAsyncDelay(0).getNodeStore();
+        // create test data: alternately add and remove /test and /foo
+        // so every iteration commits a change on /test
+        for (int i = 0; i < NUM_CHANGES; i++) {
+            NodeBuilder builder = ns.getRoot().builder();
+            if (builder.hasChildNode("test")) {
+                builder.child("test").remove();
+                builder.child("foo").remove();
+            } else {
+                builder.child("test");
+                builder.child("foo");
+            }
+            merge(ns, builder);
+            // randomly (~20% of iterations) split the /test document to
+            // build up a chain of previous documents
+            if (Math.random() < 0.2) {
+                Revision head = ns.getHeadRevision();
+                NodeDocument doc = ns.getDocumentStore().find(
+                        NODES, Utils.getIdFromPath("/test"));
+                for (UpdateOp op : SplitOperations.forDocument(
+                        doc, ns, head, 2)) {
+                    store.createOrUpdate(NODES, op);
+                }
+            }
+        }
+        NodeDocument doc = ns.getDocumentStore().find(
+                NODES, Utils.getIdFromPath("/test"));
+        // get most recent previous doc
+        NodeDocument prev = doc.getAllPreviousDocs().next();
+        // simulate a change revision within the range of
+        // the most recent previous document
+        Iterable<Revision> changes = prev.getAllChanges();
+        // clusterId 1000 makes the revision come from a foreign cluster node
+        Revision changeRev = new Revision(Iterables.getLast(changes).getTimestamp(), 1000, ns.getClusterId());
+        // reset calls to previous documents
+        prevDocCalls.clear();
+        doc.getNewestRevision(ns, changeRev, new CollisionHandler() {
+            @Override
+            void concurrentModification(Revision other) {
+                // ignore
+            }
+        });
+        // must not read all previous docs
+        assertTrue("too many calls for previous documents: " + prevDocCalls,
+                prevDocCalls.size() <= 4);
+
+        ns.dispose();
+    }
+
+    /**
+     * Creates a {@link DocumentNodeStore} backed by a fresh
+     * {@link MemoryDocumentStore} with {@code numChanges} committed
+     * changes on the root node. The caller must dispose the store.
+     */
+    private DocumentNodeStore createTestStore(int numChanges) throws Exception {
+        return createTestStore(new MemoryDocumentStore(), numChanges);
+    }
+
+    /**
+     * Creates a {@link DocumentNodeStore} on the given store, commits
+     * {@code numChanges} updates of property "p" on the root node and
+     * randomly (~20% of iterations) splits the root document, so the
+     * returned store has a chain of previous documents on the root.
+     * The caller must dispose the returned store.
+     */
+    private DocumentNodeStore createTestStore(DocumentStore store,
+                                              int numChanges) throws Exception {
+        DocumentNodeStore ns = new DocumentMK.Builder()
+                .setDocumentStore(store)
+                .setAsyncDelay(0).getNodeStore();
+        for (int i = 0; i < numChanges; i++) {
+            NodeBuilder builder = ns.getRoot().builder();
+            builder.setProperty("p", i);
+            merge(ns, builder);
+            // NOTE(review): Math.random() makes the number of split
+            // documents non-deterministic across test runs
+            if (Math.random() < 0.2) {
+                Revision head = ns.getHeadRevision();
+                for (UpdateOp op : SplitOperations.forDocument(
+                        getRootDocument(store), ns, head, 2)) {
+                    store.createOrUpdate(NODES, op);
+                }
+            }
+        }
+        return ns;
+    }
+
+    /**
+     * Merges the builder's changes into the store using an empty commit
+     * hook and no commit info.
+     */
+    private void merge(NodeStore store, NodeBuilder builder)
+            throws CommitFailedException {
+        store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+    }
+
+    /**
+     * Returns the root document ("/") from the given store.
+     *
+     * @throws IllegalStateException if the store has no root document
+     */
+    private static NodeDocument getRootDocument(DocumentStore store) {
+        String rootId = Utils.getIdFromPath("/");
+        NodeDocument root = store.find(Collection.NODES, rootId);
+        if (root == null) {
+            throw new IllegalStateException("missing root document");
+        }
+        return root;
+    }
 }