Posted to oak-commits@jackrabbit.apache.org by ju...@apache.org on 2013/10/03 21:19:44 UTC

svn commit: r1528979 - in /jackrabbit/oak/trunk: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/ oak-core/src/main/java/org/apache/jackrabbit/oak/spi/state/ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/ oak-jcr/

Author: jukka
Date: Thu Oct  3 19:19:43 2013
New Revision: 1528979

URL: http://svn.apache.org/r1528979
Log:
OAK-1031: SegmentMK: Fewer segment lookups

Merge MapLeaf and MapBranch into the MapRecord base class so it can be instantiated without reading the record.
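
The point of the merge, as visible in the diff below: the old Segment.readMap(int offset) had to read and decode the head word of the record just to decide whether to build a MapLeaf or a MapBranch, while the merged MapRecord constructor only stores the segment and record id and decodes the size/level head lazily in whichever accessor first needs it. A minimal standalone sketch of that pattern (LazyMapRecord, SimpleSegment and the constant values are illustrative assumptions for this sketch, not the actual Oak classes):

    // Hypothetical sketch of lazy head decoding; not the Oak API.
    import java.nio.ByteBuffer;

    class SimpleSegment {
        private final ByteBuffer data;
        SimpleSegment(ByteBuffer data) { this.data = data; }
        int readInt(int offset) { return data.getInt(offset); }
    }

    class LazyMapRecord {
        static final int LEVEL_BITS = 5;                      // illustrative value
        static final int SIZE_BITS = 28;                      // size fits in the low 28 bits (MAX_SIZE ~268e6, per the diff)
        static final int BUCKETS_PER_LEVEL = 1 << LEVEL_BITS;
        static final int MAX_NUMBER_OF_LEVELS = 32 / LEVEL_BITS;

        private final SimpleSegment segment;
        private final int offset;

        // Cheap constructor: nothing is read from the segment here.
        LazyMapRecord(SimpleSegment segment, int offset) {
            this.segment = segment;
            this.offset = offset;
        }

        // The head word (level in the high bits, size in the low bits) is read on demand.
        private int head() { return segment.readInt(offset); }

        int size()  { return head() & ((1 << SIZE_BITS) - 1); }
        int level() { return head() >>> SIZE_BITS; }

        boolean isLeaf() {
            return !(size() > BUCKETS_PER_LEVEL && level() < MAX_NUMBER_OF_LEVELS);
        }
    }

The new MapRecord below follows exactly this pattern: size(), isLeaf(), getEntry() and friends each decode getSize(head)/getLevel(head) from a single readInt instead of carrying size and level as constructor arguments.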

Removed:
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/MapBranch.java
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/MapLeaf.java
Modified:
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/MapEntry.java
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/MapRecord.java
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentNodeState.java
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/state/AbstractNodeState.java
    jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/SegmentSizeTest.java
    jackrabbit/oak/trunk/oak-jcr/pom.xml

Modified: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/MapEntry.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/MapEntry.java?rev=1528979&r1=1528978&r2=1528979&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/MapEntry.java (original)
+++ jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/MapEntry.java Thu Oct  3 19:19:43 2013
@@ -49,7 +49,7 @@ class MapEntry extends AbstractChildNode
     }
 
     public int getHash() {
-        return name.hashCode();
+        return MapRecord.getHash(name);
     }
 
     //----------------------------------------------------< ChildNodeEntry >--
@@ -87,7 +87,7 @@ class MapEntry extends AbstractChildNode
     @Override
     public int compareTo(MapEntry that) {
         return ComparisonChain.start()
-                .compare(name.hashCode(), that.name.hashCode())
+                .compare(getHash(), that.getHash())
                 .compare(name, that.name)
                 .compare(value, that.value)
                 .result();

Modified: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/MapRecord.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/MapRecord.java?rev=1528979&r1=1528978&r2=1528979&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/MapRecord.java (original)
+++ jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/MapRecord.java Thu Oct  3 19:19:43 2013
@@ -16,15 +16,47 @@
  */
 package org.apache.jackrabbit.oak.plugins.segment;
 
-import static com.google.common.base.Preconditions.checkElementIndex;
-import static com.google.common.base.Preconditions.checkPositionIndex;
-import static com.google.common.collect.Sets.newHashSet;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.collect.Iterables.concat;
+import static com.google.common.collect.Iterables.transform;
+import static com.google.common.collect.Lists.newArrayListWithCapacity;
+import static java.lang.Integer.bitCount;
 import static java.lang.Integer.highestOneBit;
 import static java.lang.Integer.numberOfTrailingZeros;
 
-import java.util.Set;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import com.google.common.base.Function;
+import com.google.common.base.Objects;
+import com.google.common.collect.ComparisonChain;
 
-abstract class MapRecord extends Record {
+class MapRecord extends Record {
+
+    private static final long M = 0x5DEECE66DL;
+    private static final long A = 0xBL;
+
+    static int getHash(String name) {
+        return (int) (((name.hashCode() ^ M) * M + A) >> 16);
+    }
+
+    private static final Function<MapRecord, Iterable<String>> GET_KEYS =
+            new Function<MapRecord, Iterable<String>>() {
+                @Override
+                public Iterable<String> apply(MapRecord input) {
+                    return input.getKeys();
+                }
+            };
+
+    private static final Function<MapRecord, Iterable<MapEntry>> GET_ENTRIES =
+            new Function<MapRecord, Iterable<MapEntry>>() {
+                @Override
+                public Iterable<MapEntry> apply(MapRecord input) {
+                    return input.getEntries();
+                }
+            };
 
     /**
      * Number of bits of the hash code to look at on each level of the trie.
@@ -58,31 +90,157 @@ abstract class MapRecord extends Record 
      */
     protected static final int MAX_SIZE = (1 << SIZE_BITS) - 1; // ~268e6
 
-    protected final int size;
+    protected MapRecord(Segment segment, RecordId id) {
+        super(segment, id);
+    }
 
-    protected final int level;
+    boolean isLeaf() {
+        int head = getSegment().readInt(getOffset(0));
+        return !isBranch(getSize(head), getLevel(head));
+    }
 
-    protected MapRecord(Segment segment, int offset, int size, int level) {
-        super(segment, offset);
-        this.size = checkElementIndex(size, MAX_SIZE);
-        this.level = checkPositionIndex(level, MAX_NUMBER_OF_LEVELS);
+    RecordId[] getBuckets() {
+        return getBuckets(getSegment());
     }
 
-    protected MapRecord(Segment segment, RecordId id, int size, int level) {
-        super(segment, id);
-        this.size = checkElementIndex(size, MAX_SIZE);
-        this.level = checkPositionIndex(level, MAX_NUMBER_OF_LEVELS);
+    private RecordId[] getBuckets(Segment segment) {
+        RecordId[] buckets = new RecordId[BUCKETS_PER_LEVEL];
+        int bitmap = segment.readInt(getOffset(4));
+        int ids = 0;
+        for (int i = 0; i < BUCKETS_PER_LEVEL; i++) {
+            if ((bitmap & (1 << i)) != 0) {
+                buckets[i] = segment.readRecordId(getOffset(8, ids++));
+            } else {
+                buckets[i] = null;
+            }
+        }
+        return buckets;
+    }
+
+    private List<MapRecord> getBucketList(Segment segment) {
+        List<MapRecord> buckets = newArrayListWithCapacity(BUCKETS_PER_LEVEL);
+        int bitmap = segment.readInt(getOffset(4));
+        int ids = 0;
+        for (int i = 0; i < BUCKETS_PER_LEVEL; i++) {
+            if ((bitmap & (1 << i)) != 0) {
+                RecordId id = segment.readRecordId(getOffset(8, ids++));
+                buckets.add(new MapRecord(segment, id));
+            }
+        }
+        return buckets;
     }
 
     int size() {
-        return size;
+        int head = getSegment().readInt(getOffset(0));
+        return getSize(head);
     }
 
-    abstract MapEntry getEntry(String key);
+    MapEntry getEntry(String key) {
+        checkNotNull(key);
+        Segment segment = getSegment();
+
+        int head = segment.readInt(getOffset(0));
+        int size = getSize(head);
+        if (size == 0) {
+            return null; // shortcut
+        }
+
+        int hash = getHash(key);
+        int level = getLevel(head);
+        if (isBranch(size, level)) {
+            // this is an intermediate branch record
+            // check if a matching bucket exists, and recurse 
+            int bitmap = segment.readInt(getOffset(4));
+            int mask = BUCKETS_PER_LEVEL - 1;
+            int shift = 32 - (level + 1) * LEVEL_BITS;
+            int index = (hash >> shift) & mask;
+            int bit = 1 << index;
+            if ((bitmap & bit) != 0) {
+                int ids = bitCount(bitmap & (bit - 1));
+                RecordId id = segment.readRecordId(getOffset(8, ids));
+                return new MapRecord(segment, id).getEntry(key);
+            } else {
+                return null;
+            }
+        }
 
-    abstract Iterable<String> getKeys();
+        // this is a leaf record; scan the list to find a matching entry
+        int d = -1;
+        for (int i = 0; i < size && d < 0; i++) {
+            d = Integer.valueOf(segment.readInt(getOffset(4 + i * 4)))
+                    .compareTo(Integer.valueOf(hash));
+            if (d == 0) {
+                RecordId keyId = segment.readRecordId(
+                        getOffset(4 + size * 4, i));
+                d = segment.readString(keyId).compareTo(key);
+                if (d == 0) {
+                    RecordId valueId = segment.readRecordId(
+                            getOffset(4 + size * 4, size + i));
+                    return new MapEntry(segment, key, keyId, valueId);
+                }
+            }
+        }
+
+        return null;
+    }
+
+    Iterable<String> getKeys() {
+        Segment segment = getSegment();
+
+        int head = segment.readInt(getOffset(0));
+        int size = getSize(head);
+        if (size == 0) {
+            return Collections.emptyList(); // shortcut
+        }
+
+        int level = getLevel(head);
+        if (isBranch(size, level)) {
+            return concat(transform(getBucketList(segment), GET_KEYS));
+        }
+
+        RecordId[] ids = new RecordId[size];
+        for (int i = 0; i < size; i++) {
+            ids[i] = segment.readRecordId(getOffset(4 + size * 4, i));
+        }
+
+        String[] keys = new String[size];
+        for (int i = 0; i < size; i++) {
+            keys[i] = segment.readString(ids[i]);
+        }
+        return Arrays.asList(keys);
+    }
+
+    Iterable<MapEntry> getEntries() {
+        Segment segment = getSegment();
+
+        int head = segment.readInt(getOffset(0));
+        int size = getSize(head);
+        if (size == 0) {
+            return Collections.emptyList(); // shortcut
+        }
+
+        int level = getLevel(head);
+        if (isBranch(size, level)) {
+            return concat(transform(getBucketList(segment), GET_ENTRIES));
+        }
+
+        RecordId[] keys = new RecordId[size];
+        for (int i = 0; i < size; i++) {
+            keys[i] = segment.readRecordId(getOffset(4 + size * 4, i));
+        }
+
+        RecordId[] values = new RecordId[size];
+        for (int i = 0; i < size; i++) {
+            values[i] = segment.readRecordId(getOffset(4 + size * 4, size + i));
+        }
 
-    abstract Iterable<MapEntry> getEntries();
+        MapEntry[] entries = new MapEntry[size];
+        for (int i = 0; i < size; i++) {
+            String name = segment.readString(keys[i]);
+            entries[i] = new MapEntry(segment, name, keys[i], values[i]);
+        }
+        return Arrays.asList(entries);
+    }
 
     boolean compareAgainstEmptyMap(MapDiff diff) {
         for (MapEntry entry : getEntries()) {
@@ -99,31 +257,8 @@ abstract class MapRecord extends Record 
         boolean entryDeleted(MapEntry before);
     }
 
-    boolean compare(MapRecord that, MapDiff diff) {
-        Set<String> keys = newHashSet();
-        for (MapEntry entry : getEntries()) {
-            String name = entry.getName();
-            MapEntry thatEntry = that.getEntry(name);
-            if (thatEntry == null) {
-                if (!diff.entryAdded(entry)) {
-                    return false;
-                }
-            } else if (!entry.getValue().equals(thatEntry.getValue())) {
-                if (!diff.entryChanged(thatEntry, entry)) {
-                    return false;
-                }
-            }
-            keys.add(name);
-        }
-        for (MapEntry entry : that.getEntries()) {
-            String name = entry.getName();
-            if (!keys.contains(name)) {
-                if (!diff.entryDeleted(entry)) {
-                    return false;
-                }
-            }
-        }
-        return true;
+    boolean compare(MapRecord base, MapDiff diff) {
+        return compare(base, this, diff);
     }
 
     //------------------------------------------------------------< Object >--
@@ -147,4 +282,115 @@ abstract class MapRecord extends Record 
         }
     }
 
+    //-----------------------------------------------------------< private >--
+
+    private static boolean compare(
+            MapRecord before, MapRecord after, MapDiff diff) {
+        Segment beforeSegment = before.getSegment();
+        Segment afterSegment = after.getSegment();
+        int beforeHead = beforeSegment.readInt(before.getOffset(0));
+        int afterHead = afterSegment.readInt(after.getOffset(0));
+        if (isBranch(getSize(beforeHead), getLevel(beforeHead))
+                && isBranch(getSize(afterHead), getLevel(afterHead))) {
+            RecordId[] beforeBuckets = before.getBuckets(beforeSegment);
+            RecordId[] afterBuckets = after.getBuckets(afterSegment);
+            for (int i = 0; i < BUCKETS_PER_LEVEL; i++) {
+                if (Objects.equal(beforeBuckets[i], afterBuckets[i])) {
+                    // do nothing
+                } else if (beforeBuckets[i] == null) {
+                    MapRecord bucket =
+                            new MapRecord(afterSegment, afterBuckets[i]);
+                    for (MapEntry entry : bucket.getEntries()) {
+                        if (!diff.entryAdded(entry)) {
+                            return false;
+                        }
+                    }
+                } else if (afterBuckets[i] == null) {
+                    MapRecord bucket =
+                            new MapRecord(beforeSegment, beforeBuckets[i]);
+                    for (MapEntry entry : bucket.getEntries()) {
+                        if (!diff.entryDeleted(entry)) {
+                            return false;
+                        }
+                    }
+                } else {
+                    MapRecord beforeBucket =
+                            new MapRecord(beforeSegment, beforeBuckets[i]);
+                    MapRecord afterBucket =
+                            new MapRecord(afterSegment, afterBuckets[i]);
+                    if (!compare(beforeBucket, afterBucket, diff)) {
+                        return false;
+                    }
+                }
+            }
+            return true;
+        }
+
+        Iterator<MapEntry> beforeEntries = before.getEntries().iterator();
+        Iterator<MapEntry> afterEntries = after.getEntries().iterator();
+
+        MapEntry beforeEntry = nextOrNull(beforeEntries);
+        MapEntry afterEntry = nextOrNull(afterEntries);
+        while (beforeEntry != null || afterEntry != null) {
+            int d = compare(beforeEntry, afterEntry);
+            if (d < 0) {
+                if (!diff.entryDeleted(beforeEntry)) {
+                    return false;
+                }
+                beforeEntry = nextOrNull(beforeEntries);
+            } else if (d == 0) {
+                if (!diff.entryChanged(beforeEntry, afterEntry)) {
+                    return false;
+                }
+                beforeEntry = nextOrNull(beforeEntries);
+                afterEntry = nextOrNull(afterEntries);
+            } else {
+                if (!diff.entryAdded(afterEntry)) {
+                    return false;
+                }
+                afterEntry = nextOrNull(afterEntries);
+            }
+        }
+
+        return true;
+    }
+
+    private static int getSize(int head) {
+        return head & ((1 << MapRecord.SIZE_BITS) - 1);
+    }
+
+    private static int getLevel(int head) {
+        return head >>> MapRecord.SIZE_BITS;
+    }
+
+    private static boolean isBranch(int size, int level) {
+        return size > MapRecord.BUCKETS_PER_LEVEL
+                && level < MapRecord.MAX_NUMBER_OF_LEVELS;
+    }
+
+    private static int compare(MapEntry before, MapEntry after) {
+        if (before == null) {
+            // A null value signifies the end of the list of entries,
+            // which is why the return value here is a bit counter-intuitive
+            // (null > non-null). The idea is to make a virtual end-of-list
+            // sentinel value appear greater than any normal value.
+            return 1;
+        } else if (after == null) {
+            return -1;  // see above
+        } else {
+            return ComparisonChain.start()
+                    .compare(before.getHash(), after.getHash())
+                    .compare(before.getName(), after.getName())
+                    .result();
+        }
+    }
+
+    private static MapEntry nextOrNull(Iterator<MapEntry> iterator) {
+        if (iterator.hasNext()) {
+            return iterator.next();
+        } else {
+            return null;
+        }
+    }
+
 }

Modified: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java?rev=1528979&r1=1528978&r2=1528979&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java (original)
+++ jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java Thu Oct  3 19:19:43 2013
@@ -240,20 +240,7 @@ public class Segment {
     }
 
     MapRecord readMap(RecordId id) {
-        return getSegment(id).readMap(id.getOffset());
-    }
-
-    MapRecord readMap(int offset) {
-        int head = readInt(offset);
-        int level = head >>> MapRecord.SIZE_BITS;
-        int size = head & ((1 << MapRecord.SIZE_BITS) - 1);
-        if (size > MapRecord.BUCKETS_PER_LEVEL
-                && level < MapRecord.MAX_NUMBER_OF_LEVELS) {
-            int bitmap = readInt(offset + 4);
-            return new MapBranch(this, offset, size, level, bitmap);
-        } else {
-            return new MapLeaf(this, offset, size, level);
-        }
+        return new MapRecord(this, id);
     }
 
     Template readTemplate(final RecordId id) {

Modified: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentNodeState.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentNodeState.java?rev=1528979&r1=1528978&r2=1528979&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentNodeState.java (original)
+++ jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentNodeState.java Thu Oct  3 19:19:43 2013
@@ -307,9 +307,16 @@ public class SegmentNodeState extends Re
                                 getSegment(), getRecordId(),
                                 that.getSegment(), that.getRecordId());
             }
+        } else if (object instanceof NodeState){
+            return AbstractNodeState.equals(this, (NodeState) object); // TODO
         } else {
-            return super.equals(object); // TODO
+            return false;
         }
     }
 
+    @Override
+    public String toString() {
+        return AbstractNodeState.toString(this);
+    }
+
 }

Modified: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java?rev=1528979&r1=1528978&r2=1528979&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java (original)
+++ jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java Thu Oct  3 19:19:43 2013
@@ -219,7 +219,7 @@ public class SegmentWriter {
         writeInt((int) value);
     }
 
-    private MapLeaf writeMapLeaf(
+    private MapRecord writeMapLeaf(
             int level, Collection<MapEntry> entries) {
         checkNotNull(entries);
 
@@ -250,7 +250,7 @@ public class SegmentWriter {
             for (MapEntry entry : array) {
                 writeRecordId(entry.getValue());
             }
-            return new MapLeaf(dummySegment, id, size, level);
+            return new MapRecord(dummySegment, id);
         }
     }
 
@@ -271,7 +271,7 @@ public class SegmentWriter {
             for (RecordId id : ids) {
                 writeRecordId(id);
             }
-            return new MapBranch(dummySegment, mapId, size, level, bitmap);
+            return new MapRecord(dummySegment, mapId);
         }
     }
 
@@ -287,7 +287,7 @@ public class SegmentWriter {
     private synchronized MapRecord writeMapBucket(
             RecordId baseId, Collection<MapEntry> entries, int level) {
         int mask = MapRecord.BUCKETS_PER_LEVEL - 1;
-        int shift = level * MapRecord.LEVEL_BITS;
+        int shift = 32 - (level + 1) * MapRecord.LEVEL_BITS;
 
         if (entries == null || entries.isEmpty()) {
             if (baseId != null) {
@@ -296,7 +296,7 @@ public class SegmentWriter {
                 synchronized (this) {
                     RecordId id = prepare(4);
                     writeInt(0);
-                    return new MapLeaf(dummySegment, id, 0, 0);
+                    return new MapRecord(dummySegment, id);
                 }
             } else {
                 return null;
@@ -304,7 +304,7 @@ public class SegmentWriter {
         } else if (baseId != null) {
             // FIXME: messy code with lots of duplication
             MapRecord base = dummySegment.readMap(baseId);
-            if (base instanceof MapLeaf) {
+            if (base.isLeaf()) {
                 Map<String, MapEntry> map = newHashMap();
                 for (MapEntry entry : base.getEntries()) {
                     map.put(entry.getName(), entry);
@@ -327,7 +327,7 @@ public class SegmentWriter {
                 buckets.addAll(Collections.nCopies(
                         BUCKETS_PER_LEVEL, (Collection<MapEntry>) null));
                 for (MapEntry entry : entries) {
-                    int bucketIndex = (entry.hashCode() >> shift) & mask;
+                    int bucketIndex = (entry.getHash() >> shift) & mask;
                     Collection<MapEntry> bucket = buckets.get(bucketIndex);
                     if (bucket == null) {
                         bucket = Lists.newArrayList();
@@ -338,7 +338,7 @@ public class SegmentWriter {
 
                 int newSize = 0;
                 List<MapRecord> newBuckets = Lists.newArrayList();
-                RecordId[] bucketIds = ((MapBranch) base).getBuckets();
+                RecordId[] bucketIds = base.getBuckets();
                 for (int i = 0; i < BUCKETS_PER_LEVEL; i++) {
                     MapRecord newBucket = writeMapBucket(
                             bucketIds[i], buckets.get(i), level + 1);
@@ -359,7 +359,7 @@ public class SegmentWriter {
                         synchronized (this) {
                             RecordId id = prepare(4);
                             writeInt(0);
-                            return new MapLeaf(dummySegment, id, 0, 0);
+                            return new MapRecord(dummySegment, id);
                         }
                     } else {
                         return null;
@@ -367,8 +367,6 @@ public class SegmentWriter {
                 } else if (newBuckets.size() == 1) {
                     return newBuckets.iterator().next();
                 } else {
-                    // FIXME: ugly hack, flush() shouldn't be needed here
-                    flush();
                     List<MapEntry> list = Lists.newArrayList();
                     for (MapRecord record : newBuckets) {
                         Iterables.addAll(list, record.getEntries());
@@ -382,7 +380,7 @@ public class SegmentWriter {
         } else {
             List<MapEntry>[] lists = new List[MapRecord.BUCKETS_PER_LEVEL];
             for (MapEntry entry : entries) {
-                int bucketIndex = (entry.hashCode() >> shift) & mask;
+                int bucketIndex = (entry.getHash() >> shift) & mask;
                 if (lists[bucketIndex] == null) {
                     lists[bucketIndex] = Lists.newArrayList();
                 }

Modified: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/state/AbstractNodeState.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/state/AbstractNodeState.java?rev=1528979&r1=1528978&r2=1528979&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/state/AbstractNodeState.java (original)
+++ jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/state/AbstractNodeState.java Thu Oct  3 19:19:43 2013
@@ -175,6 +175,25 @@ public abstract class AbstractNodeState 
         return true;
     }
 
+    public static String toString(NodeState state) {
+        if (!state.exists()) {
+            return "{N/A}";
+        }
+        StringBuilder builder = new StringBuilder("{");
+        String separator = " ";
+        for (PropertyState property : state.getProperties()) {
+            builder.append(separator);
+            separator = ", ";
+            builder.append(property);
+        }
+        for (ChildNodeEntry entry : state.getChildNodeEntries()) {
+            builder.append(separator);
+            separator = ", ";
+            builder.append(entry);
+        }
+        builder.append(" }");
+        return builder.toString();
+    }
 
     @Override
     public boolean hasProperty(String name) {
@@ -268,23 +287,7 @@ public abstract class AbstractNodeState 
      * @return string representation
      */
     public String toString() {
-        if (!exists()) {
-            return "{N/A}";
-        }
-        StringBuilder builder = new StringBuilder("{");
-        String separator = " ";
-        for (PropertyState property : getProperties()) {
-            builder.append(separator);
-            separator = ", ";
-            builder.append(property);
-        }
-        for (ChildNodeEntry entry : getChildNodeEntries()) {
-            builder.append(separator);
-            separator = ", ";
-            builder.append(entry);
-        }
-        builder.append(" }");
-        return builder.toString();
+        return toString(this);
     }
 
     /**
@@ -301,24 +304,24 @@ public abstract class AbstractNodeState 
     public boolean equals(Object that) {
         if (this == that) {
             return true;
-        } else if (that == null || !(that instanceof NodeState)) {
+        } else if (that instanceof NodeState) {
+            return equals(this, (NodeState) that);
+        } else {
             return false;
         }
+    }
 
-        NodeState other = (NodeState) that;
-
-        if (exists() != other.exists()) {
-            return false;
+    public static boolean equals(NodeState a, NodeState b) {
+        if (a.exists() != b.exists()
+                || a.getPropertyCount() != b.getPropertyCount()) {
+            return false; // shortcut
         }
 
-        if (getPropertyCount() != other.getPropertyCount()) {
-            return false;
-        }
         // if one of the objects has few entries,
         // then compare the number of entries with the other one
         long max = 20;
-        long c1 = getChildNodeCount(max);
-        long c2 = other.getChildNodeCount(max);
+        long c1 = a.getChildNodeCount(max);
+        long c2 = b.getChildNodeCount(max);
         if (c1 <= max || c2 <= max) {
             // one has less than max entries
             if (c1 != c2) {
@@ -331,8 +334,8 @@ public abstract class AbstractNodeState 
             }
         }
 
-        for (PropertyState property : getProperties()) {
-            if (!property.equals(other.getProperty(property.getName()))) {
+        for (PropertyState property : a.getProperties()) {
+            if (!property.equals(b.getProperty(property.getName()))) {
                 return false;
             }
         }
@@ -341,17 +344,16 @@ public abstract class AbstractNodeState 
 
         // compare the exact child node count
         // (before, we only compared up to 20 entries)
-        c1 = getChildNodeCount(Long.MAX_VALUE);
-        c2 = other.getChildNodeCount(Long.MAX_VALUE);
+        c1 = a.getChildNodeCount(Long.MAX_VALUE);
+        c2 = b.getChildNodeCount(Long.MAX_VALUE);
         if (c1 != c2) {
             return false;
         }
-        
+
         // compare all child nodes recursively (this is potentially very slow,
         // as it recursively calls equals)
-        for (ChildNodeEntry entry : getChildNodeEntries()) {
-            if (!entry.getNodeState().equals(
-                    other.getChildNode(entry.getName()))) {
+        for (ChildNodeEntry entry : a.getChildNodeEntries()) {
+            if (!entry.getNodeState().equals(b.getChildNode(entry.getName()))) {
                 return false;
             }
         }

Modified: jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/SegmentSizeTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/SegmentSizeTest.java?rev=1528979&r1=1528978&r2=1528979&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/SegmentSizeTest.java (original)
+++ jackrabbit/oak/trunk/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/segment/SegmentSizeTest.java Thu Oct  3 19:19:43 2013
@@ -154,7 +154,7 @@ public class SegmentSizeTest {
 
         SegmentNodeState state = writer.writeNode(builder.getNodeState());
         Segment segment = store.readSegment(state.getRecordId().getSegmentId());
-        assertEquals(26784, segment.size());
+        assertEquals(26728, segment.size());
 
         writer.flush(); // force flushing of the previous segment
 
@@ -162,7 +162,7 @@ public class SegmentSizeTest {
         builder.child("child1000");
         state = writer.writeNode(builder.getNodeState());
         segment = store.readSegment(state.getRecordId().getSegmentId());
-        assertEquals(252, segment.size());
+        assertEquals(96, segment.size());
     }
 
     private int getSize(NodeBuilder builder) {

Modified: jackrabbit/oak/trunk/oak-jcr/pom.xml
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-jcr/pom.xml?rev=1528979&r1=1528978&r2=1528979&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-jcr/pom.xml (original)
+++ jackrabbit/oak/trunk/oak-jcr/pom.xml Thu Oct  3 19:19:43 2013
@@ -171,6 +171,7 @@
       org.apache.jackrabbit.core.observation.ShareableNodesTest#testAddShare                         <!-- OAK-118 workspace support needed -->
       org.apache.jackrabbit.core.observation.ShareableNodesTest#testRemoveShare                      <!-- OAK-118 workspace support needed -->
       org.apache.jackrabbit.oak.jcr.query.QueryTest#fnNameEncoding                                   <!-- OAK-1000 -->
+      org.apache.jackrabbit.core.query.SQL2OrderByTest#testOrderByScore                              <!-- JCR-3677 -->
 
       <!-- Node Types -->
       org.apache.jackrabbit.oak.jcr.nodetype.NodeDefinitionTest#testRootType                         <!-- OAK-901 -->