Posted to oak-commits@jackrabbit.apache.org by md...@apache.org on 2016/05/23 12:59:23 UTC

svn commit: r1745182 [2/3] - in /jackrabbit/oak/trunk: oak-run/src/main/java/org/apache/jackrabbit/oak/run/ oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/ oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/ oak-segme...

Modified: jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentTracker.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentTracker.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentTracker.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentTracker.java Mon May 23 12:59:22 2016
@@ -18,24 +18,15 @@
  */
 package org.apache.jackrabbit.oak.segment;
 
-import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.collect.Sets.newHashSet;
-import static java.lang.Long.getLong;
 
 import java.security.SecureRandom;
 import java.util.Set;
-import java.util.concurrent.ExecutionException;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import javax.annotation.CheckForNull;
 import javax.annotation.Nonnull;
 
 import com.google.common.base.Predicate;
-import com.google.common.cache.RemovalCause;
-import org.apache.jackrabbit.oak.cache.CacheLIRS;
-import org.apache.jackrabbit.oak.cache.CacheLIRS.EvictionCallback;
-import org.apache.jackrabbit.oak.cache.CacheStats;
-import org.apache.jackrabbit.oak.segment.file.FileStore;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -46,12 +37,7 @@ import org.slf4j.LoggerFactory;
  * It is also responsible to cache segment objects in memory.
  */
 public class SegmentTracker {
-
-    /** Logger instance */
-    private static final Logger log =
-            LoggerFactory.getLogger(SegmentTracker.class);
-
-    static final String STRING_CACHE_SIZE = "oak.segment.stringCache";
+    private static final Logger log = LoggerFactory.getLogger(SegmentTracker.class);
 
     private static final long MSB_MASK = ~(0xfL << 12);
 
@@ -63,19 +49,11 @@ public class SegmentTracker {
 
     private static final long BULK = 0xBL << 60;
 
-    private static final long MB = 1024 * 1024;
-
-    private static final int DEFAULT_MEMORY_CACHE_SIZE = 256;
-
     /**
      * The random number source for generating new segment identifiers.
      */
     private final SecureRandom random = new SecureRandom();
 
-    private final SegmentStore store;
-
-    private final SegmentWriter writer;
-
     /**
      * Hash table of weak references to segment identifiers that are
      * currently being accessed. The size of the table is always a power
@@ -88,50 +66,14 @@ public class SegmentTracker {
     private final SegmentIdTable[] tables = new SegmentIdTable[32];
 
     /**
-     * Cache for string records
-     */
-    private final StringCache stringCache;
-
-    /**
-     * Cache of recently accessed segments
-     */
-    private final CacheLIRS<SegmentId, Segment> segmentCache;
-
-    /**
      * Number of segment tracked since this tracker was instantiated
      */
     private final AtomicInteger segmentCounter = new AtomicInteger();
 
-    public SegmentTracker(SegmentStore store, int cacheSizeMB, SegmentVersion version) {
-        checkArgument(SegmentVersion.isValid(version));
-
+    public SegmentTracker(@Nonnull SegmentStore store) {
         for (int i = 0; i < tables.length; i++) {
-            tables[i] = new SegmentIdTable(this);
+            tables[i] = new SegmentIdTable(store);
         }
-
-        this.store = store;
-        this.writer = new SegmentWriter(store, new SegmentBufferWriterPool(store, version, "sys"));
-        stringCache = new StringCache(getLong(STRING_CACHE_SIZE, (long) cacheSizeMB) * MB);
-        segmentCache = CacheLIRS.<SegmentId, Segment>newBuilder()
-            .module("SegmentTracker")
-            .maximumWeight((long) cacheSizeMB * MB)
-            .averageWeight(Segment.MAX_SEGMENT_SIZE/2)
-            .evictionCallback(new EvictionCallback<SegmentId, Segment>() {
-                @Override
-                public void evicted(SegmentId segmentId, Segment segment, RemovalCause cause) {
-                    if (segment != null) {
-                        segmentId.setSegment(null);
-                    }
-                }
-            }).build();
-    }
-
-    public SegmentTracker(SegmentStore store, SegmentVersion version) {
-        this(store, DEFAULT_MEMORY_CACHE_SIZE, version);
-    }
-
-    public SegmentTracker(SegmentStore store) {
-        this(store, DEFAULT_MEMORY_CACHE_SIZE, SegmentVersion.LATEST_VERSION);
     }
 
     /**
@@ -142,94 +84,6 @@ public class SegmentTracker {
         return segmentCounter.get();
     }
 
-    public boolean isTracking(SegmentId segmentId) {
-        return this == segmentId.getTracker();
-    }
-
-    @Nonnull
-    public CacheStats getSegmentCacheStats() {
-        return new CacheStats(segmentCache, "Segment Cache", null, -1);
-    }
-
-    @CheckForNull
-    public CacheStats getStringCacheStats() {
-        return stringCache.getStats();
-    }
-
-    public SegmentWriter getWriter() {
-        return writer;
-    }
-
-    public SegmentStore getStore() {
-        return store;
-    }
-
-    /**
-     * Clear the caches
-     */
-    public synchronized void clearCache() {
-        segmentCache.invalidateAll();
-        stringCache.clear();
-    }
-
-    /**
-     * Get the string cache, if there is one.
-     *
-     * @return the string cache or {@code null} if none is configured
-     */
-    StringCache getStringCache() {
-        return stringCache;
-    }
-
-    /**
-     * Get a segment from the cache
-     * @param id  segment id
-     * @return  segment with the given {@code id} or {@code null} if not in the cache
-     */
-    Segment getCachedSegment(SegmentId id) {
-        try {
-            return segmentCache.get(id);
-        } catch (ExecutionException e) {
-            log.error("Error reading from segment cache", e);
-            return null;
-        }
-    }
-
-    /**
-     * Read a segment from the underlying segment store.
-     * @param id  segment id
-     * @return  segment with the given id
-     * @throws SegmentNotFoundException  if no segment with the given {@code id} exists.
-     */
-    Segment readSegment(SegmentId id) {
-        try {
-            Segment segment = store.readSegment(id);
-            setSegment(id, segment);
-            return segment;
-        } catch (SegmentNotFoundException snfe) {
-            long delta = System.currentTimeMillis() - id.getCreationTime();
-            log.error("Segment not found: {}. Creation date delta is {} ms.",
-                    id, delta, snfe);
-            throw snfe;
-        }
-    }
-
-    void setSegment(SegmentId id, Segment segment) {
-        id.setSegment(segment);
-        segmentCache.put(id, segment, segment.size());
-    }
-
-    // FIXME OAK-4102: Break cyclic dependency of FileStore and SegmentTracker
-    // Improve retrieving current GC generation. (OAK-4102)
-    // See also the comments in FileStore regarding initialisation and cyclic dependencies.
-    public int getGcGeneration() {
-        if (store instanceof FileStore) {
-            return ((FileStore) store).getGcGeneration();
-        } else {
-            return 0;
-        }
-    }
-
     /**
      * Returns all segment identifiers that are currently referenced in memory.
      *
@@ -272,6 +126,8 @@ public class SegmentTracker {
     // FIXME OAK-4285: Align cleanup of segment id tables with the new cleanup strategy
     // With clean brutal we need to remove those ids that have been cleaned
     // i.e. those whose segment was from an old generation
+    // Instead of removing, mark affected ids as gc'ed so the SNFE caused by
+    // any subsequent access can report a precise cause
     public synchronized void clearSegmentIdTables(Predicate<SegmentId> canRemove) {
         for (SegmentIdTable table : tables) {
             table.clearSegmentIdTables(canRemove);
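
For orientation, a brief sketch of what the slimmed-down tracker still does (id generation and weak tracking of ids); the writer, string cache and segment cache have moved to the store. The use of MemoryStore here is purely illustrative:

    SegmentStore store = new MemoryStore();        // throws IOException; any SegmentStore works
    SegmentTracker tracker = store.getTracker();   // constructed internally as new SegmentTracker(store)
    SegmentId bulkId = tracker.newBulkSegmentId(); // id generation stays with the tracker
    // Caches and the writer are no longer reachable through the tracker;
    // use store.getWriter() and store.getReader() instead.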

Modified: jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentWriter.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentWriter.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentWriter.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentWriter.java Mon May 23 12:59:22 2016
@@ -117,13 +117,13 @@ public class SegmentWriter {
 
     @Nonnull
     public MapRecord writeMap(final MapRecord base, final Map<String, RecordId> changes) throws IOException {
-        return new MapRecord(
-            writeOperationHandler.execute(new SegmentWriteOperation() {
-                @Override
-                public RecordId execute(SegmentBufferWriter writer) throws IOException {
-                    return with(writer).writeMap(base, changes);
-                }
-            }));
+        RecordId mapId = writeOperationHandler.execute(new SegmentWriteOperation() {
+            @Override
+            public RecordId execute(SegmentBufferWriter writer) throws IOException {
+                return with(writer).writeMap(base, changes);
+            }
+        });
+        return new MapRecord(store, mapId);
     }
 
     @Nonnull
@@ -148,13 +148,13 @@ public class SegmentWriter {
 
     @Nonnull
     public SegmentBlob writeBlob(final Blob blob) throws IOException {
-        return new SegmentBlob(
-            writeOperationHandler.execute(new SegmentWriteOperation() {
-                @Override
-                public RecordId execute(SegmentBufferWriter writer) throws IOException {
-                    return with(writer).writeBlob(blob);
-                }
-            }));
+        RecordId blobId = writeOperationHandler.execute(new SegmentWriteOperation() {
+            @Override
+            public RecordId execute(SegmentBufferWriter writer) throws IOException {
+                return with(writer).writeBlob(blob);
+            }
+        });
+        return new SegmentBlob(store, blobId);
     }
 
     /**
@@ -185,13 +185,13 @@ public class SegmentWriter {
      */
     @Nonnull
     public SegmentBlob writeStream(final InputStream stream) throws IOException {
-        return new SegmentBlob(
-            writeOperationHandler.execute(new SegmentWriteOperation() {
-                @Override
-                public RecordId execute(SegmentBufferWriter writer) throws IOException {
-                    return with(writer).writeStream(stream);
-                }
-            }));
+        RecordId blobId = writeOperationHandler.execute(new SegmentWriteOperation() {
+            @Override
+            public RecordId execute(SegmentBufferWriter writer) throws IOException {
+                return with(writer).writeStream(stream);
+            }
+        });
+        return new SegmentBlob(store, blobId);
     }
 
     @Nonnull
@@ -202,7 +202,7 @@ public class SegmentWriter {
                 return with(writer).writeProperty(state);
             }
         });
-        return new SegmentPropertyState(id, state.getName(), state.getType());
+        return new SegmentPropertyState(store, id, state.getName(), state.getType());
     }
 
     /**
@@ -213,17 +213,19 @@ public class SegmentWriter {
      */
     @Nonnull
     public SegmentNodeState writeNode(final NodeState state) throws IOException {
-        return new SegmentNodeState(
-            writeOperationHandler.execute(new SegmentWriteOperation() {
-                @Override
-                public RecordId execute(SegmentBufferWriter writer) throws IOException {
-                    return with(writer).writeNode(state, 0);
-                }
-            }));
+        RecordId nodeId = writeOperationHandler.execute(new SegmentWriteOperation() {
+            @Override
+            public RecordId execute(SegmentBufferWriter writer) throws IOException {
+                return with(writer).writeNode(state, 0);
+            }
+        });
+        return new SegmentNodeState(store, nodeId);
     }
 
     /**
-     * Write a node state, unless cancelled using a dedicated write operation handler
+     * Write a node state, unless cancelled using a dedicated write operation handler.
+     * The write operation handler is automatically {@link WriteOperationHandler#flush() flushed}
+     * once the node has been written successfully.
      * @param state   node state to write
      * @param writeOperationHandler  the write operation handler through which all write calls
      *                               induced by this call are routed.
@@ -237,12 +239,14 @@ public class SegmentWriter {
                                       Supplier<Boolean> cancel)
     throws IOException {
         try {
-            return new SegmentNodeState(writeOperationHandler.execute(new SegmentWriteOperation(cancel) {
+            RecordId nodeId = writeOperationHandler.execute(new SegmentWriteOperation(cancel) {
                 @Override
                 public RecordId execute(SegmentBufferWriter writer) throws IOException {
                     return with(writer).writeNode(state, 0);
                 }
-            }));
+            });
+            writeOperationHandler.flush();
+            return new SegmentNodeState(store, nodeId);
         } catch (SegmentWriteOperation.CancelledWriteException ignore) {
             return null;
         }
@@ -297,11 +301,11 @@ public class SegmentWriter {
             if (base != null && base.isDiff()) {
                 Segment segment = base.getSegment();
                 RecordId key = segment.readRecordId(base.getOffset(8));
-                String name = Segment.readString(key);
+                String name = store.getReader().readString(key);
                 if (!changes.containsKey(name)) {
                     changes.put(name, segment.readRecordId(base.getOffset(8, 1)));
                 }
-                base = new MapRecord(segment.readRecordId(base.getOffset(8, 2)));
+                base = new MapRecord(store, segment.readRecordId(base.getOffset(8, 2)));
             }
 
             if (base != null && changes.size() == 1) {
@@ -337,7 +341,7 @@ public class SegmentWriter {
                 }
 
                 if (keyId != null) {
-                    entries.add(new MapEntry(key, keyId, entry.getValue()));
+                    entries.add(new MapEntry(store, key, keyId, entry.getValue()));
                 }
             }
             return writeMapBucket(base, entries, 0);
@@ -450,7 +454,7 @@ public class SegmentWriter {
         }
 
         private MapRecord mapRecordOrNull(RecordId id) {
-            return id == null ? null : new MapRecord(id);
+            return id == null ? null : new MapRecord(store, id);
         }
 
         /**
@@ -538,7 +542,7 @@ public class SegmentWriter {
 
             // write as many full bulk segments as possible
             while (pos + Segment.MAX_SEGMENT_SIZE <= data.length) {
-                SegmentId bulkId = getTracker().newBulkSegmentId();
+                SegmentId bulkId = store.getTracker().newBulkSegmentId();
                 store.writeSegment(bulkId, data, pos, Segment.MAX_SEGMENT_SIZE);
                 for (int i = 0; i < Segment.MAX_SEGMENT_SIZE; i += BLOCK_SIZE) {
                     blockIds.add(new RecordId(bulkId, i));
@@ -556,18 +560,22 @@ public class SegmentWriter {
             return writeValueRecord(data.length, writeList(blockIds));
         }
 
+        private boolean sameStore(SegmentId id) {
+            return id.sameStore(store);
+        }
+
         /**
          * @param   blob
          * @return  {@code true} iff {@code blob} is a {@code SegmentBlob}
-         *          and this store's tracker has its segment.
+         *          and originates from the same segment store.
          */
-        private boolean hasSegment(Blob blob) {
+        private boolean sameStore(Blob blob) {
             return (blob instanceof SegmentBlob)
-                    && (getTracker().isTracking(((Record) blob).getRecordId().getSegmentId()));
+                    && sameStore(((Record) blob).getRecordId().getSegmentId());
         }
 
         private RecordId writeBlob(Blob blob) throws IOException {
-            if (hasSegment(blob)) {
+            if (sameStore(blob)) {
                 SegmentBlob segmentBlob = (SegmentBlob) blob;
                 if (!isOldGeneration(segmentBlob.getRecordId())) {
                     return segmentBlob.getRecordId();
@@ -660,7 +668,7 @@ public class SegmentWriter {
 
             // Write the data to bulk segments and collect the list of block ids
             while (n != 0) {
-                SegmentId bulkId = getTracker().newBulkSegmentId();
+                SegmentId bulkId = store.getTracker().newBulkSegmentId();
                 int len = Segment.align(n, 1 << Segment.RECORD_ALIGN_BITS);
                 LOG.debug("Writing bulk segment {} ({} bytes)", bulkId, n);
                 store.writeSegment(bulkId, data, 0, len);
@@ -794,7 +802,7 @@ public class SegmentWriter {
             }
             if (state instanceof SegmentNodeState) {
                 SegmentNodeState sns = ((SegmentNodeState) state);
-                if (hasSegment(sns)) {
+                if (sameStore(sns)) {
                     // This is a segment node state from an old generation. Check whether
                     // an equivalent one of the current generation is in the cache
                     if (isOldGeneration(sns.getRecordId())) {
@@ -830,7 +838,7 @@ public class SegmentWriter {
                 NodeState base = after.getBaseState();
                 if (base instanceof SegmentNodeState) {
                     SegmentNodeState sns = ((SegmentNodeState) base);
-                    if (hasSegment(sns)) {
+                    if (sameStore(sns)) {
                         if (!isOldGeneration(sns.getRecordId())) {
                             before = sns;
                             beforeTemplate = before.getTemplate();
@@ -840,7 +848,7 @@ public class SegmentWriter {
             }
 
             List<RecordId> ids = newArrayList();
-            Template template = new Template(state);
+            Template template = new Template(store, state);
             if (template.equals(beforeTemplate)) {
                 ids.add(before.getTemplateId());
             } else {
@@ -876,14 +884,14 @@ public class SegmentWriter {
                 PropertyState property = state.getProperty(name);
                 assert property != null;
 
-                if (hasSegment(property)) {
+                if (sameStore(property)) {
                     RecordId pid = ((Record) property).getRecordId();
                     if (isOldGeneration(pid)) {
                         pIds.add(writeProperty(property));
                     } else {
                         pIds.add(pid);
                     }
-                } else if (before == null || !hasSegment(before)) {
+                } else if (before == null || !sameStore(before)) {
                     pIds.add(writeProperty(property));
                 } else {
                     // reuse previously stored property, if possible
@@ -920,20 +928,20 @@ public class SegmentWriter {
 
         /**
          * @param   node
-         * @return  {@code true} iff this store's tracker has the segment of {@code node}.
+         * @return  {@code true} iff {@code node} originates from the same segment store.
          */
-        private boolean hasSegment(SegmentNodeState node) {
-            return getTracker().isTracking(node.getRecordId().getSegmentId());
+        private boolean sameStore(SegmentNodeState node) {
+            return sameStore(node.getRecordId().getSegmentId());
         }
 
         /**
          * @param property
          * @return  {@code true} iff {@code property} is a {@code SegmentPropertyState}
-         *          and this store's tracker has its segment.
+         *          and originates from the same segment store.
          */
-        private boolean hasSegment(PropertyState property) {
+        private boolean sameStore(PropertyState property) {
             return (property instanceof SegmentPropertyState)
-                && (getTracker().isTracking(((Record) property).getRecordId().getSegmentId()));
+                && sameStore(((Record) property).getRecordId().getSegmentId());
         }
 
         private boolean isOldGeneration(RecordId id) {
@@ -991,8 +999,4 @@ public class SegmentWriter {
         }
     }
 
-    private SegmentTracker getTracker() {
-        return store.getTracker();
-    }
-
 }
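
A hedged sketch of the resulting write path: records returned by the writer are now constructed against the store, and the explicit-handler variant of writeNode flushes the handler itself, so callers such as FileStore.compact no longer flush. The generation value and the use of EMPTY_NODE below are illustrative assumptions:

    SegmentWriter writer = store.getWriter();
    SegmentNodeState node = writer.writeNode(EMPTY_NODE);   // record is bound to the store, not a tracker

    SegmentBufferWriter bufferWriter =
            new SegmentBufferWriter(store, LATEST_VERSION, "c", generation);
    SegmentNodeState compacted =
            writer.writeNode(EMPTY_NODE, bufferWriter, Suppliers.ofInstance(false));
    // writeNode(state, handler, cancel) flushes the handler before returning,
    // and returns null if the cancel supplier aborted the write.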

Modified: jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/Template.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/Template.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/Template.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/Template.java Mon May 23 12:59:22 2016
@@ -30,6 +30,7 @@ import java.util.List;
 
 import javax.annotation.CheckForNull;
 import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 
 import com.google.common.base.Objects;
 import com.google.common.collect.Lists;
@@ -57,6 +58,9 @@ public class Template {
 
     static final String MANY_CHILD_NODES = "";
 
+    @Nonnull
+    private final SegmentStore store;
+
     /**
      * The {@code jcr:primaryType} property, if present as a single-valued
      * {@code NAME} property. Otherwise {@code null}.
@@ -86,8 +90,12 @@ public class Template {
     @CheckForNull
     private final String childName;
 
-    Template(PropertyState primaryType, PropertyState mixinTypes,
-            PropertyTemplate[] properties, String childName) {
+    Template(@Nonnull SegmentStore store,
+             @Nullable PropertyState primaryType,
+             @Nullable PropertyState mixinTypes,
+             @Nullable  PropertyTemplate[] properties,
+             @Nullable String childName) {
+        this.store = store;
         this.primaryType = primaryType;
         this.mixinTypes = mixinTypes;
         if (properties != null) {
@@ -99,7 +107,8 @@ public class Template {
         this.childName = childName;
     }
 
-    Template(NodeState state) {
+    Template(@Nonnull SegmentStore store, @Nonnull NodeState state) {
+        this.store = store;
         PropertyState primary = null;
         PropertyState mixins = null;
         List<PropertyTemplate> templates = Lists.newArrayList();
@@ -133,10 +142,12 @@ public class Template {
         }
     }
 
+    @CheckForNull
     PropertyState getPrimaryType() {
         return primaryType;
     }
 
+    @CheckForNull
     PropertyState getMixinTypes() {
         return mixinTypes;
     }
@@ -171,6 +182,7 @@ public class Template {
         return null;
     }
 
+    @CheckForNull
     String getChildName() {
         return childName;
     }
@@ -186,7 +198,7 @@ public class Template {
         RecordId lid = segment.readRecordId(offset);
         ListRecord props = new ListRecord(lid, properties.length);
         RecordId rid = props.getEntry(index);
-        return new SegmentPropertyState(rid, properties[index]);
+        return new SegmentPropertyState(store, rid, properties[index]);
     }
 
     MapRecord getChildNodeMap(RecordId recordId) {
@@ -194,7 +206,7 @@ public class Template {
         Segment segment = recordId.getSegment();
         int offset = recordId.getOffset() + 2 * RECORD_ID_BYTES;
         RecordId childNodesId = segment.readRecordId(offset);
-        return segment.readMap(childNodesId);
+        return store.getReader().readMap(store, childNodesId);
     }
 
     public NodeState getChildNode(String name, RecordId recordId) {
@@ -212,7 +224,7 @@ public class Template {
             Segment segment = recordId.getSegment();
             int offset = recordId.getOffset() + 2 * RECORD_ID_BYTES;
             RecordId childNodeId = segment.readRecordId(offset);
-            return new SegmentNodeState(childNodeId);
+            return new SegmentNodeState(store, childNodeId);
         } else {
             return MISSING_NODE;
         }
@@ -229,7 +241,7 @@ public class Template {
             int offset = recordId.getOffset() + 2 * RECORD_ID_BYTES;
             RecordId childNodeId = segment.readRecordId(offset);
             return Collections.singletonList(new MemoryChildNodeEntry(
-                    childName, new SegmentNodeState(childNodeId)));
+                    childName, new SegmentNodeState(store, childNodeId)));
         }
     }
 

Modified: jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStore.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStore.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStore.java Mon May 23 12:59:22 2016
@@ -36,6 +36,7 @@ import static java.util.concurrent.TimeU
 import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCount;
 import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
 import static org.apache.jackrabbit.oak.segment.SegmentId.isDataSegmentId;
+import static org.apache.jackrabbit.oak.segment.SegmentReaderImpl.DEFAULT_STRING_CACHE_MB;
 
 import java.io.Closeable;
 import java.io.File;
@@ -51,6 +52,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
@@ -66,20 +69,27 @@ import com.google.common.base.Predicate;
 import com.google.common.base.Predicates;
 import com.google.common.base.Stopwatch;
 import com.google.common.base.Supplier;
+import com.google.common.base.Suppliers;
 import org.apache.jackrabbit.oak.api.Blob;
+import org.apache.jackrabbit.oak.cache.CacheStats;
 import org.apache.jackrabbit.oak.plugins.blob.BlobStoreBlob;
 import org.apache.jackrabbit.oak.plugins.blob.ReferenceCollector;
 import org.apache.jackrabbit.oak.segment.RecordId;
 import org.apache.jackrabbit.oak.segment.Segment;
 import org.apache.jackrabbit.oak.segment.SegmentBufferWriter;
+import org.apache.jackrabbit.oak.segment.SegmentBufferWriterPool;
+import org.apache.jackrabbit.oak.segment.SegmentCache;
 import org.apache.jackrabbit.oak.segment.SegmentGraph.SegmentGraphVisitor;
 import org.apache.jackrabbit.oak.segment.SegmentId;
 import org.apache.jackrabbit.oak.segment.SegmentNodeState;
 import org.apache.jackrabbit.oak.segment.SegmentNodeStore;
 import org.apache.jackrabbit.oak.segment.SegmentNotFoundException;
+import org.apache.jackrabbit.oak.segment.SegmentReader;
+import org.apache.jackrabbit.oak.segment.SegmentReaderImpl;
 import org.apache.jackrabbit.oak.segment.SegmentStore;
 import org.apache.jackrabbit.oak.segment.SegmentTracker;
 import org.apache.jackrabbit.oak.segment.SegmentVersion;
+import org.apache.jackrabbit.oak.segment.SegmentWriter;
 import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions;
 import org.apache.jackrabbit.oak.spi.blob.BlobStore;
 import org.apache.jackrabbit.oak.spi.gc.GCMonitor;
@@ -118,6 +128,10 @@ public class FileStore implements Segmen
 
     private final SegmentTracker tracker;
 
+    private final SegmentWriter segmentWriter;
+
+    private final SegmentReader segmentReader;
+
     private final File directory;
 
     private final BlobStore blobStore;
@@ -215,6 +229,9 @@ public class FileStore implements Segmen
 
     private final FileStoreStats stats;
 
+    @Nonnull
+    private final SegmentCache segmentCache;
+
     /**
      * Create a new instance of a {@link Builder} for a file store.
      * @param directory  directory where the tar files are stored
@@ -413,13 +430,31 @@ public class FileStore implements Segmen
         // the initial node state. Notably before this instance is fully initialised!
         // One consequence of this is that we cannot reliably determine the current
         // GC generation while writing the initial head state. See further below.
+
+        this.tracker = new SegmentTracker(this);
+
+        // FIXME OAK-4373 refactor cache size configurations
+        if (builder.cacheSize < 0) {
+            this.segmentCache = new SegmentCache(0);
+        } else if (builder.cacheSize > 0) {
+            this.segmentCache = new SegmentCache(builder.cacheSize);
+        } else {
+            this.segmentCache = new SegmentCache(DEFAULT_STRING_CACHE_MB);
+        }
         if (builder.cacheSize < 0) {
-            this.tracker = new SegmentTracker(this, 0, version);
+            this.segmentReader = new SegmentReaderImpl(0);
         } else if (builder.cacheSize > 0) {
-            this.tracker = new SegmentTracker(this, builder.cacheSize, version);
+            this.segmentReader = new SegmentReaderImpl(builder.cacheSize);
         } else {
-            this.tracker = new SegmentTracker(this, version);
+            this.segmentReader = new SegmentReaderImpl();
         }
+        this.segmentWriter = new SegmentWriter(this,
+                new SegmentBufferWriterPool(this, version, "sys", new Supplier<Integer>() {
+                    @Override
+                    public Integer get() {
+                        return getGcGeneration();
+                    }
+                }));
         this.blobStore = builder.blobStore;
         this.directory = builder.directory;
         this.maxFileSize = builder.maxFileSize * MB;
@@ -467,8 +502,7 @@ public class FileStore implements Segmen
         }
 
         RecordId id = null;
-        JournalReader journalReader = new JournalReader(new File(directory, JOURNAL_FILE_NAME));
-        try {
+        try (JournalReader journalReader = new JournalReader(new File(directory, JOURNAL_FILE_NAME))) {
             Iterator<String> heads = journalReader.iterator();
             while (id == null && heads.hasNext()) {
                 String head = heads.next();
@@ -486,8 +520,6 @@ public class FileStore implements Segmen
                     log.warn("Skipping invalid record id {}", head);
                 }
             }
-        } finally {
-            journalReader.close();
         }
 
         journalFile.seek(journalFile.length());
@@ -502,14 +534,14 @@ public class FileStore implements Segmen
         }
 
         if (id != null) {
-            head = new AtomicReference<RecordId>(id);
-            persistedHead = new AtomicReference<RecordId>(id);
+            head = new AtomicReference<>(id);
+            persistedHead = new AtomicReference<>(id);
         } else {
             NodeBuilder nodeBuilder = EMPTY_NODE.builder();
             nodeBuilder.setChildNode("root", builder.root);
-            head = new AtomicReference<RecordId>(tracker.getWriter().writeNode(
-                    nodeBuilder.getNodeState()).getRecordId());
-            persistedHead = new AtomicReference<RecordId>(null);
+            head = new AtomicReference<>(writeNode(
+                builder.root, segmentWriter, new SegmentBufferWriter(this, version, "init", 0)));
+            persistedHead = new AtomicReference<>(null);
         }
 
         if (!readOnly) {
@@ -564,18 +596,23 @@ public class FileStore implements Segmen
         log.debug("TarMK readers {}", this.readers);
     }
 
-    // FIXME OAK-4102: Break cyclic dependency of FileStore and SegmentTracker
-    // We cannot determine the current GC generation before the FileStore is fully
-    // initialised so just return 0 for now.
-    public int getGcGeneration() {
-        if (head == null) {
-            return 0;  // not fully initialised
-        }
-        RecordId headId = head.get();
-        if (headId == null) {
-            return 0;  // not fully initialised
-        }
-        return headId.getSegment().getGcGeneration();
+    private static RecordId writeNode(NodeState root, SegmentWriter writer,
+                                      SegmentBufferWriter bufferWriter)
+    throws IOException {
+        NodeBuilder nodeBuilder = EMPTY_NODE.builder();
+        nodeBuilder.setChildNode("root", root);
+        SegmentNodeState node = writer.writeNode(nodeBuilder.getNodeState(), bufferWriter, Suppliers.ofInstance(false));
+        assert node != null;
+        return node.getRecordId();
+    }
+
+    private int getGcGeneration() {
+        return head.get().getSegment().getGcGeneration();
+    }
+
+    @Nonnull
+    public CacheStats getSegmentCacheStats() {
+        return segmentCache.getCacheStats();
     }
 
     public void maybeCompact(boolean cleanup) throws IOException {
@@ -804,7 +841,7 @@ public class FileStore implements Segmen
             RecordId after = head.get();
 
             if (cleanup || !after.equals(before)) {
-                tracker.getWriter().flush();
+                segmentWriter.flush();
 
                 // FIXME OAK-4291: FileStore.flush prone to races leading to corruption
                 // There is a small window that could lead to a corrupted store:
@@ -880,7 +917,7 @@ public class FileStore implements Segmen
                     GC_COUNT, humanReadableByteCount(initialSize), initialSize);
 
             newWriter();
-            tracker.clearCache();
+            segmentCache.clear();
 
             // Suggest to the JVM that now would be a good time
             // to clear stale weak references in the SegmentTracker
@@ -975,7 +1012,7 @@ public class FileStore implements Segmen
      * @param collector  reference collector called back for each blob reference found
      */
     public void collectBlobReferences(ReferenceCollector collector) throws IOException {
-        tracker.getWriter().flush();
+        segmentWriter.flush();
         List<TarReader> tarReaders = newArrayList();
         fileStoreLock.writeLock().lock();
         try {
@@ -1069,7 +1106,7 @@ public class FileStore implements Segmen
                     GC_COUNT, existing);
         }
 
-        final int newGeneration = tracker.getGcGeneration() + 1;
+        final int newGeneration = getGcGeneration() + 1;
         SegmentBufferWriter bufferWriter = new SegmentBufferWriter(
                 this, version, "c", newGeneration);
         Supplier<Boolean> cancel = newCancelCompactionCondition();
@@ -1124,7 +1161,7 @@ public class FileStore implements Segmen
             }
 
             if (success) {
-                tracker.getWriter().evictCaches(new Predicate<Integer>() {
+                segmentWriter.evictCaches(new Predicate<Integer>() {
                     @Override
                     public boolean apply(Integer generation) {
                         return generation < newGeneration;
@@ -1144,7 +1181,7 @@ public class FileStore implements Segmen
                         GC_COUNT, watch, watch.elapsed(MILLISECONDS), cycles - 1);
                 return true;
             } else {
-                tracker.getWriter().evictCaches(new Predicate<Integer>() {
+                segmentWriter.evictCaches(new Predicate<Integer>() {
                     @Override
                     public boolean apply(Integer generation) {
                         return generation == newGeneration;
@@ -1167,11 +1204,7 @@ public class FileStore implements Segmen
     private SegmentNodeState compact(SegmentBufferWriter bufferWriter, NodeState node,
                                      Supplier<Boolean> cancel)
     throws IOException {
-        SegmentNodeState compacted = tracker.getWriter().writeNode(node, bufferWriter, cancel);
-        if (compacted != null) {
-            bufferWriter.flush();
-        }
-        return compacted;
+        return segmentWriter.writeNode(node, bufferWriter, cancel);
     }
 
     private boolean forceCompact(SegmentBufferWriter bufferWriter, Supplier<Boolean> cancel)
@@ -1224,8 +1257,18 @@ public class FileStore implements Segmen
     }
 
     @Override
+    public SegmentWriter getWriter() {
+        return segmentWriter;
+    }
+
+    @Override
+    public SegmentReader getReader() {
+        return segmentReader;
+    }
+
+    @Override
     public SegmentNodeState getHead() {
-        return new SegmentNodeState(head.get());
+        return new SegmentNodeState(this, head.get());
     }
 
     // FIXME OAK-4015: Expedite commits from the compactor
@@ -1324,76 +1367,87 @@ public class FileStore implements Segmen
     }
 
     @Override
-    public Segment readSegment(SegmentId id) {
-        long msb = id.getMostSignificantBits();
-        long lsb = id.getLeastSignificantBits();
-
-        for (TarReader reader : readers) {
-            try {
-                if (reader.isClosed()) {
-                    // Cleanup might already have closed the file.
-                    // The segment should be available from another file.
-                    log.debug("Skipping closed tar file {}", reader);
-                    continue;
-                }
+    public Segment readSegment(final SegmentId id) {
+        try {
+            return segmentCache.geSegment(id, new Callable<Segment>() {
+                @Override
+                public Segment call() throws Exception {
+                    long msb = id.getMostSignificantBits();
+                    long lsb = id.getLeastSignificantBits();
+
+                    for (TarReader reader : readers) {
+                        try {
+                            if (reader.isClosed()) {
+                                // Cleanup might already have closed the file.
+                                // The segment should be available from another file.
+                                log.debug("Skipping closed tar file {}", reader);
+                                continue;
+                            }
 
-                ByteBuffer buffer = reader.readEntry(msb, lsb);
-                if (buffer != null) {
-                    return new Segment(tracker, id, buffer);
-                }
-            } catch (IOException e) {
-                log.warn("Failed to read from tar file {}", reader, e);
-            }
-        }
+                            ByteBuffer buffer = reader.readEntry(msb, lsb);
+                            if (buffer != null) {
+                                return new Segment(FileStore.this, id, buffer);
+                            }
+                        } catch (IOException e) {
+                            log.warn("Failed to read from tar file {}", reader, e);
+                        }
+                    }
 
-        if (writer != null) {
-            fileStoreLock.readLock().lock();
-            try {
-                try {
-                    ByteBuffer buffer = writer.readEntry(msb, lsb);
-                    if (buffer != null) {
-                        return new Segment(tracker, id, buffer);
+                    if (writer != null) {
+                        fileStoreLock.readLock().lock();
+                        try {
+                            try {
+                                ByteBuffer buffer = writer.readEntry(msb, lsb);
+                                if (buffer != null) {
+                                    return new Segment(FileStore.this, id, buffer);
+                                }
+                            } catch (IOException e) {
+                                log.warn("Failed to read from tar file {}", writer, e);
+                            }
+                        } finally {
+                            fileStoreLock.readLock().unlock();
+                        }
                     }
-                } catch (IOException e) {
-                    log.warn("Failed to read from tar file {}", writer, e);
-                }
-            } finally {
-                fileStoreLock.readLock().unlock();
-            }
-        }
 
-        // the writer might have switched to a new file,
-        // so we need to re-check the readers
-        for (TarReader reader : readers) {
-            try {
-                if (reader.isClosed()) {
-                    // Cleanup might already have closed the file.
-                    // The segment should be available from another file.
-                    log.info("Skipping closed tar file {}", reader);
-                    continue;
-                }
+                    // the writer might have switched to a new file,
+                    // so we need to re-check the readers
+                    for (TarReader reader : readers) {
+                        try {
+                            if (reader.isClosed()) {
+                                // Cleanup might already have closed the file.
+                                // The segment should be available from another file.
+                                log.info("Skipping closed tar file {}", reader);
+                                continue;
+                            }
+
+                            ByteBuffer buffer = reader.readEntry(msb, lsb);
+                            if (buffer != null) {
+                                return new Segment(FileStore.this, id, buffer);
+                            }
+                        } catch (IOException e) {
+                            log.warn("Failed to read from tar file {}", reader, e);
+                        }
+                    }
 
-                ByteBuffer buffer = reader.readEntry(msb, lsb);
-                if (buffer != null) {
-                    return new Segment(tracker, id, buffer);
+                    throw new SegmentNotFoundException(id);
                 }
-            } catch (IOException e) {
-                log.warn("Failed to read from tar file {}", reader, e);
-            }
+            });
+        } catch (ExecutionException e) {
+            throw e.getCause() instanceof SegmentNotFoundException
+                ? (SegmentNotFoundException) e.getCause()
+                : new SegmentNotFoundException(id, e);
         }
-
-        throw new SegmentNotFoundException(id);
     }
 
     @Override
-    public void writeSegment(SegmentId id, byte[] data, int offset, int length) throws IOException {
+    public void writeSegment(SegmentId id, byte[] buffer, int offset, int length) throws IOException {
         fileStoreLock.writeLock().lock();
         try {
-            int generation = Segment.getGcGeneration(wrap(data, offset, length), id.asUUID());
+            int generation = Segment.getGcGeneration(wrap(buffer, offset, length), id.asUUID());
             long size = writer.writeEntry(
                     id.getMostSignificantBits(),
                     id.getLeastSignificantBits(),
-                    data, offset, length, generation);
+                    buffer, offset, length, generation);
             if (size >= maxFileSize) {
                 newWriter();
             }
@@ -1401,6 +1455,19 @@ public class FileStore implements Segmen
         } finally {
             fileStoreLock.writeLock().unlock();
         }
+
+        // Keep this data segment in memory as it's likely to be accessed soon
+        if (id.isDataSegmentId()) {
+            ByteBuffer data;
+            if (offset > 4096) {
+                data = ByteBuffer.allocate(length);
+                data.put(buffer, offset, length);
+                data.rewind();
+            } else {
+                data = ByteBuffer.wrap(buffer, offset, length);
+            }
+            segmentCache.putSegment(new Segment(this, id, data));
+        }
     }
 
     /**
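
At the store level the net effect is that segment reads go through the new SegmentCache, and the writer, reader and cache statistics are obtained from the store itself. A short, hedged usage sketch (the directory and id variables are assumed to be in scope, and the builder defaults are an assumption):

    FileStore fileStore = FileStore.builder(directory).build();
    SegmentWriter writer = fileStore.getWriter();           // replaces tracker.getWriter()
    SegmentReader reader = fileStore.getReader();           // new read-side API (e.g. readString, readMap)
    CacheStats stats = fileStore.getSegmentCacheStats();    // segment cache now lives in the store

    Segment segment = fileStore.readSegment(id);  // served from the cache, loaded from tar on a miss;
                                                  // throws SegmentNotFoundException if the segment is gone
    fileStore.close();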

Modified: jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/http/HttpStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/http/HttpStore.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/http/HttpStore.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/http/HttpStore.java Mon May 23 12:59:22 2016
@@ -19,6 +19,7 @@
 package org.apache.jackrabbit.oak.segment.http;
 
 import static com.google.common.base.Charsets.UTF_8;
+import static org.apache.jackrabbit.oak.segment.SegmentVersion.LATEST_VERSION;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -32,23 +33,30 @@ import java.nio.ByteBuffer;
 
 import javax.annotation.CheckForNull;
 
+import com.google.common.io.ByteStreams;
 import org.apache.jackrabbit.oak.api.Blob;
 import org.apache.jackrabbit.oak.segment.RecordId;
 import org.apache.jackrabbit.oak.segment.Segment;
+import org.apache.jackrabbit.oak.segment.SegmentBufferWriterPool;
 import org.apache.jackrabbit.oak.segment.SegmentId;
-import org.apache.jackrabbit.oak.segment.SegmentNotFoundException;
-import org.apache.jackrabbit.oak.segment.SegmentTracker;
 import org.apache.jackrabbit.oak.segment.SegmentNodeState;
+import org.apache.jackrabbit.oak.segment.SegmentNotFoundException;
+import org.apache.jackrabbit.oak.segment.SegmentReader;
+import org.apache.jackrabbit.oak.segment.SegmentReaderImpl;
 import org.apache.jackrabbit.oak.segment.SegmentStore;
-
-import com.google.common.io.ByteStreams;
-
+import org.apache.jackrabbit.oak.segment.SegmentTracker;
+import org.apache.jackrabbit.oak.segment.SegmentWriter;
 import org.apache.jackrabbit.oak.spi.blob.BlobStore;
 
 public class HttpStore implements SegmentStore {
 
     private final SegmentTracker tracker = new SegmentTracker(this);
 
+    private final SegmentWriter segmentWriter = new SegmentWriter(this,
+            new SegmentBufferWriterPool(this, LATEST_VERSION, "sys"));
+
+    private final SegmentReader segmentReader = new SegmentReaderImpl();
+
     private final URL base;
 
     /**
@@ -65,6 +73,16 @@ public class HttpStore implements Segmen
         return tracker;
     }
 
+    @Override
+    public SegmentWriter getWriter() {
+        return segmentWriter;
+    }
+
+    @Override
+    public SegmentReader getReader() {
+        return segmentReader;
+    }
+
     /**
      * Builds a simple URLConnection. This method can be extended to add
      * authorization headers if needed.
@@ -89,8 +107,7 @@ public class HttpStore implements Segmen
             try {
                 BufferedReader reader = new BufferedReader(
                         new InputStreamReader(stream, UTF_8));
-                return new SegmentNodeState(
-                        RecordId.fromString(tracker, reader.readLine()));
+                return new SegmentNodeState(this, RecordId.fromString(tracker, reader.readLine()));
             } finally {
                 stream.close();
             }
@@ -111,7 +128,7 @@ public class HttpStore implements Segmen
 
     @Override
     public boolean containsSegment(SegmentId id) {
-        return id.getTracker() == tracker || readSegment(id) != null;
+        return id.sameStore(this) || readSegment(id) != null;
     }
 
     @Override
@@ -121,7 +138,7 @@ public class HttpStore implements Segmen
             InputStream stream = connection.getInputStream();
             try {
                 byte[] data = ByteStreams.toByteArray(stream);
-                return new Segment(tracker, id, ByteBuffer.wrap(data));
+                return new Segment(this, id, ByteBuffer.wrap(data));
             } finally {
                 stream.close();
             }
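
The same migration applies to every SegmentStore implementation: ownership checks move from the tracker to the store. A rough before/after sketch, assuming id and store are in scope:

    // before this commit:
    //   boolean local = tracker.isTracking(id);              // or: id.getTracker() == tracker
    //   SegmentWriter writer = store.getTracker().getWriter();

    // after this commit:
    boolean local = id.sameStore(store);
    SegmentWriter writer = store.getWriter();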

Modified: jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/memory/MemoryStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/memory/MemoryStore.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/memory/MemoryStore.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/memory/MemoryStore.java Mon May 23 12:59:22 2016
@@ -19,6 +19,7 @@
 package org.apache.jackrabbit.oak.segment.memory;
 
 import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
+import static org.apache.jackrabbit.oak.segment.SegmentVersion.LATEST_VERSION;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -26,27 +27,33 @@ import java.util.concurrent.ConcurrentMa
 
 import javax.annotation.Nonnull;
 
+import com.google.common.collect.Maps;
 import org.apache.jackrabbit.oak.api.Blob;
 import org.apache.jackrabbit.oak.segment.Segment;
+import org.apache.jackrabbit.oak.segment.SegmentBufferWriterPool;
 import org.apache.jackrabbit.oak.segment.SegmentId;
-import org.apache.jackrabbit.oak.segment.SegmentNotFoundException;
-import org.apache.jackrabbit.oak.segment.SegmentTracker;
 import org.apache.jackrabbit.oak.segment.SegmentNodeState;
+import org.apache.jackrabbit.oak.segment.SegmentNotFoundException;
+import org.apache.jackrabbit.oak.segment.SegmentReader;
+import org.apache.jackrabbit.oak.segment.SegmentReaderImpl;
 import org.apache.jackrabbit.oak.segment.SegmentStore;
-import org.apache.jackrabbit.oak.segment.SegmentVersion;
+import org.apache.jackrabbit.oak.segment.SegmentTracker;
 import org.apache.jackrabbit.oak.segment.SegmentWriter;
 import org.apache.jackrabbit.oak.spi.blob.BlobStore;
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.apache.jackrabbit.oak.spi.state.NodeState;
 
-import com.google.common.collect.Maps;
-
 /**
  * A store used for in-memory operations.
  */
 public class MemoryStore implements SegmentStore {
 
-    private final SegmentTracker tracker = new SegmentTracker(this, 16, SegmentVersion.LATEST_VERSION);
+    private final SegmentTracker tracker = new SegmentTracker(this);
+
+    private final SegmentWriter segmentWriter = new SegmentWriter(this,
+            new SegmentBufferWriterPool(this, LATEST_VERSION, "sys"));
+
+    private final SegmentReader segmentReader = new SegmentReaderImpl(16);
 
     private SegmentNodeState head;
 
@@ -57,9 +64,8 @@ public class MemoryStore implements Segm
         NodeBuilder builder = EMPTY_NODE.builder();
         builder.setChildNode("root", root);
 
-        SegmentWriter writer = tracker.getWriter();
-        this.head = writer.writeNode(builder.getNodeState());
-        writer.flush();
+        this.head = segmentWriter.writeNode(builder.getNodeState());
+        segmentWriter.flush();
     }
 
     public MemoryStore() throws IOException {
@@ -72,6 +78,16 @@ public class MemoryStore implements Segm
     }
 
     @Override
+    public SegmentWriter getWriter() {
+        return segmentWriter;
+    }
+
+    @Override
+    public SegmentReader getReader() {
+        return segmentReader;
+    }
+
+    @Override
     public synchronized SegmentNodeState getHead() {
         return head;
     }
@@ -88,7 +104,7 @@ public class MemoryStore implements Segm
 
     @Override
     public boolean containsSegment(SegmentId id) {
-        return id.getTracker() == tracker || segments.containsKey(id);
+        return id.sameStore(this) || segments.containsKey(id);
     }
 
     @Override @Nonnull
@@ -106,7 +122,7 @@ public class MemoryStore implements Segm
         ByteBuffer buffer = ByteBuffer.allocate(length);
         buffer.put(data, offset, length);
         buffer.rewind();
-        Segment segment = new Segment(tracker, id, buffer);
+        Segment segment = new Segment(this, id, buffer);
         if (segments.putIfAbsent(id, segment) != null) {
             throw new IOException("Segment override: " + id);
         }

Modified: jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactionAndCleanupIT.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactionAndCleanupIT.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactionAndCleanupIT.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactionAndCleanupIT.java Mon May 23 12:59:22 2016
@@ -253,7 +253,7 @@ public class CompactionAndCleanupIT {
         nodeStore.merge(root, EmptyHook.INSTANCE, CommitInfo.EMPTY);
 
         final Set<UUID> beforeSegments = new HashSet<UUID>();
-        collectSegments(store.getHead(), beforeSegments);
+        collectSegments(store, beforeSegments);
 
         final AtomicReference<Boolean> run = new AtomicReference<Boolean>(true);
         final List<String> failedCommits = newArrayList();
@@ -292,7 +292,7 @@ public class CompactionAndCleanupIT {
         assertTrue("Failed commits: " + failedCommits, failedCommits.isEmpty());
 
         Set<UUID> afterSegments = new HashSet<UUID>();
-        collectSegments(store.getHead(), afterSegments);
+        collectSegments(store, afterSegments);
         try {
             for (UUID u : beforeSegments) {
                 assertFalse("Mixed segments found: " + u, afterSegments.contains(u));
@@ -313,7 +313,7 @@ public class CompactionAndCleanupIT {
     @Test
     public void cleanupCyclicGraph() throws IOException, ExecutionException, InterruptedException {
         FileStore fileStore = FileStore.builder(getFileStoreFolder()).build();
-        final SegmentWriter writer = fileStore.getTracker().getWriter();
+        final SegmentWriter writer = fileStore.getWriter();
         final SegmentNodeState oldHead = fileStore.getHead();
 
         final SegmentNodeState child = run(new Callable<SegmentNodeState>() {
@@ -435,8 +435,8 @@ public class CompactionAndCleanupIT {
         }
     }
 
-    private static void collectSegments(SegmentNodeState s, final Set<UUID> segmentIds) {
-        new SegmentParser() {
+    private static void collectSegments(SegmentStore store, final Set<UUID> segmentIds) {
+        new SegmentParser(store) {
             @Override
             protected void onNode(RecordId parentId, RecordId nodeId) {
                 super.onNode(parentId, nodeId);
@@ -508,7 +508,7 @@ public class CompactionAndCleanupIT {
                 super.onListBucket(parentId, listId, index, count, capacity);
                 segmentIds.add(listId.asUUID());
             }
-        }.parseNode(s.getRecordId());
+        }.parseNode(store.getHead().getRecordId());
     }
 
     private static void createNodes(NodeBuilder builder, int count, int depth) {
@@ -569,7 +569,7 @@ public class CompactionAndCleanupIT {
 
             try {
                 fileStore.readSegment(id);
-                fail("Segment " + id + "should be gc'ed");
+                fail("Segment " + id + " should be gc'ed");
             } catch (SegmentNotFoundException ignore) {}
         } finally {
             fileStore.close();

Modified: jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompareAgainstBaseStateTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompareAgainstBaseStateTest.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompareAgainstBaseStateTest.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompareAgainstBaseStateTest.java Mon May 23 12:59:22 2016
@@ -44,7 +44,7 @@ public class CompareAgainstBaseStateTest
     private NodeBuilder builder;
 
     public CompareAgainstBaseStateTest() throws IOException {
-        builder = new MemoryStore().getTracker().getWriter().writeNode(EMPTY_NODE).builder();
+        builder = new MemoryStore().getWriter().writeNode(EMPTY_NODE).builder();
     }
 
     @Before

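Note: as in the other fixtures touched by this commit, the SegmentWriter is now obtained
directly from the store instead of via its SegmentTracker. A minimal sketch of the updated
call, using only the API visible in this diff (fragment, test class imports assumed):

    MemoryStore store = new MemoryStore();            // throws IOException
    SegmentWriter writer = store.getWriter();         // was: store.getTracker().getWriter()
    NodeBuilder builder = writer.writeNode(EMPTY_NODE).builder();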
Modified: jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/MapRecordTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/MapRecordTest.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/MapRecordTest.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/MapRecordTest.java Mon May 23 12:59:22 2016
@@ -48,7 +48,7 @@ public class MapRecordTest {
     private NodeBuilder builder;
 
     public MapRecordTest() throws IOException {
-        builder = new MemoryStore().getTracker().getWriter().writeNode(EMPTY_NODE).builder();
+        builder = new MemoryStore().getWriter().writeNode(EMPTY_NODE).builder();
     }
 
     @Test

Modified: jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/RecordTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/RecordTest.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/RecordTest.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/RecordTest.java Mon May 23 12:59:22 2016
@@ -29,7 +29,6 @@ import static org.apache.jackrabbit.oak.
 import static org.apache.jackrabbit.oak.api.Type.STRINGS;
 import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
 import static org.apache.jackrabbit.oak.segment.ListRecord.LEVEL_SIZE;
-import static org.apache.jackrabbit.oak.segment.Segment.readString;
 import static org.apache.jackrabbit.oak.segment.SegmentVersion.LATEST_VERSION;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
@@ -58,6 +57,7 @@ import org.apache.jackrabbit.oak.spi.com
 import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -80,7 +80,12 @@ public class RecordTest {
     @Before
     public void setup() throws IOException {
         store = FileStore.builder(folder.getRoot()).build();
-        writer = store.getTracker().getWriter();
+        writer = store.getWriter();
+    }
+
+    @After
+    public void tearDown() {
+        store.close();
     }
 
     @Test
@@ -204,10 +209,10 @@ public class RecordTest {
 
         Segment segment = large.getSegmentId().getSegment();
 
-        assertEquals("", readString(empty));
-        assertEquals(" ", readString(space));
-        assertEquals("Hello, World!", readString(hello));
-        assertEquals(builder.toString(), readString(large));
+        assertEquals("", store.getReader().readString(empty));
+        assertEquals(" ", store.getReader().readString(space));
+        assertEquals("Hello, World!", store.getReader().readString(hello));
+        assertEquals(builder.toString(), store.getReader().readString(large));
     }
 
     @Test
@@ -429,7 +434,7 @@ public class RecordTest {
     @Test
     public void testCancel() throws IOException {
         NodeBuilder builder = EMPTY_NODE.builder();
-        SegmentBufferWriter bufferWriter = new SegmentBufferWriter(store, LATEST_VERSION, "test");
+        SegmentBufferWriter bufferWriter = new SegmentBufferWriter(store, LATEST_VERSION, "test", 0);
         NodeState state = writer.writeNode(builder.getNodeState(), bufferWriter, Suppliers.ofInstance(true));
         assertNull(state);
     }

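Note on the RecordTest changes above: string records are no longer resolved through the static
Segment.readString helper but through a reader obtained from the store, the test now closes the
FileStore in an @After method, and SegmentBufferWriter takes an extra int argument (the tests
pass 0; its exact meaning is not shown in this diff). A minimal sketch reusing the test's
store and writer fields:

    // Read back a string record via the store's reader (was: Segment.readString(id)).
    RecordId helloId = writer.writeString("Hello, World!");
    assertEquals("Hello, World!", store.getReader().readString(helloId));

    // SegmentBufferWriter now takes a fourth argument; 0 mirrors what these tests pass.
    SegmentBufferWriter bufferWriter =
            new SegmentBufferWriter(store, LATEST_VERSION, "test", 0);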
Modified: jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/RecordUsageAnalyserTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/RecordUsageAnalyserTest.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/RecordUsageAnalyserTest.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/RecordUsageAnalyserTest.java Mon May 23 12:59:22 2016
@@ -31,8 +31,6 @@ import static org.apache.jackrabbit.oak.
 import static org.apache.jackrabbit.oak.segment.Segment.SMALL_LIMIT;
 import static org.apache.jackrabbit.oak.segment.SegmentVersion.LATEST_VERSION;
 import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import java.util.Random;
@@ -40,22 +38,20 @@ import java.util.Random;
 import com.google.common.collect.ImmutableList;
 import org.apache.jackrabbit.oak.api.Blob;
 import org.apache.jackrabbit.oak.plugins.memory.ArrayBasedBlob;
+import org.apache.jackrabbit.oak.segment.memory.MemoryStore;
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.junit.Before;
 import org.junit.Test;
 
 public class RecordUsageAnalyserTest {
-    private SegmentStore store;
     private SegmentWriter writer;
-    private RecordUsageAnalyser analyser = new RecordUsageAnalyser();
+    private RecordUsageAnalyser analyser;
 
     @Before
-    public void setup() {
-        store = mock(SegmentStore.class);
-        SegmentTracker tracker = new SegmentTracker(store);
-        when(store.getTracker()).thenReturn(tracker);
-        writer = new SegmentWriter(store, new SegmentBufferWriter(store, LATEST_VERSION, ""));
-        analyser = new RecordUsageAnalyser();
+    public void setup() throws IOException {
+        SegmentStore store = new MemoryStore();
+        writer = new SegmentWriter(store, new SegmentBufferWriter(store, LATEST_VERSION, "", 0));
+        analyser = new RecordUsageAnalyser(store);
     }
 
     @Test
@@ -263,7 +259,6 @@ public class RecordUsageAnalyserTest {
         builder = node.builder();
         builder.child("child1").setProperty("p", "q");
 
-        when(store.containsSegment(node.getRecordId().getSegmentId())).thenReturn(true);
         node = (SegmentNodeState) builder.getNodeState();
 
         analyser.analyseNode(node.getRecordId());

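Note: the analyser test no longer mocks SegmentStore with Mockito; it runs against a real
MemoryStore, which removes the need to stub containsSegment() before analysing a node, and
RecordUsageAnalyser is now constructed with the store. A minimal usage sketch under the new
setup (fragment, same fields as the test):

    // Segments written to the MemoryStore are directly readable, so no stubbing is needed.
    SegmentNodeState node = writer.writeNode(EMPTY_NODE.builder().getNodeState());
    analyser.analyseNode(node.getRecordId());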
Modified: jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentCompactionIT.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentCompactionIT.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentCompactionIT.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentCompactionIT.java Mon May 23 12:59:22 2016
@@ -74,10 +74,10 @@ import org.apache.jackrabbit.oak.commons
 import org.apache.jackrabbit.oak.plugins.commit.ConflictHook;
 import org.apache.jackrabbit.oak.plugins.commit.DefaultConflictHandler;
 import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions;
+import org.apache.jackrabbit.oak.segment.compaction.SegmentRevisionGC;
 import org.apache.jackrabbit.oak.segment.compaction.SegmentRevisionGCMBean;
 import org.apache.jackrabbit.oak.segment.file.FileStore;
 import org.apache.jackrabbit.oak.segment.file.FileStoreGCMonitor;
-import org.apache.jackrabbit.oak.segment.compaction.SegmentRevisionGC;
 import org.apache.jackrabbit.oak.segment.file.GCMonitorMBean;
 import org.apache.jackrabbit.oak.spi.commit.CommitHook;
 import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
@@ -234,8 +234,8 @@ public class SegmentCompactionIT {
                 .build();
         nodeStore = SegmentNodeStore.builder(fileStore).build();
 
-        CacheStats segmentCacheStats = fileStore.getTracker().getSegmentCacheStats();
-        CacheStats stringCacheStats = fileStore.getTracker().getStringCacheStats();
+        CacheStats segmentCacheStats = fileStore.getSegmentCacheStats();
+        CacheStats stringCacheStats = fileStore.getReader().getStringCacheStats();
         List<Registration> registrations = newArrayList();
         registrations.add(registerMBean(segmentCompactionMBean,
                 new ObjectName("IT:TYPE=Segment Compaction")));
@@ -243,8 +243,7 @@ public class SegmentCompactionIT {
                 new ObjectName("IT:TYPE=Segment Revision GC")));
         registrations.add(registerMBean(fileStoreGCMonitor,
                 new ObjectName("IT:TYPE=GC Monitor")));
-        registrations.add(registerMBean(segmentCacheStats,
-                new ObjectName("IT:TYPE=" + segmentCacheStats.getName())));
+        registrations.add(registerMBean(segmentCacheStats, new ObjectName("IT:TYPE=" + segmentCacheStats.getName())));
         if (stringCacheStats != null) {
             registrations.add(registerMBean(stringCacheStats,
                     new ObjectName("IT:TYPE=" + stringCacheStats.getName())));

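Note: cache statistics moved as well; the segment cache stats are now exposed on the FileStore
itself and the string cache stats on its reader. A minimal sketch of the new lookups (the null
check mirrors the test above, since the string cache may be absent):

    CacheStats segmentCacheStats = fileStore.getSegmentCacheStats();
    CacheStats stringCacheStats = fileStore.getReader().getStringCacheStats();
    if (stringCacheStats != null) {
        // register or report the optional string cache statistics here
    }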
Modified: jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentGraphTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentGraphTest.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentGraphTest.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentGraphTest.java Mon May 23 12:59:22 2016
@@ -71,10 +71,10 @@ public class SegmentGraphTest {
             segments.add(getSegmentId(root));
 
             SegmentWriter w1 = new SegmentWriter(store,
-                    new SegmentBufferWriter(store, LATEST_VERSION, "writer1"));
+                    new SegmentBufferWriter(store, LATEST_VERSION, "writer1", 0));
 
             SegmentWriter w2 = new SegmentWriter(store,
-                    new SegmentBufferWriter(store, LATEST_VERSION, "writer2"));
+                    new SegmentBufferWriter(store, LATEST_VERSION, "writer2", 0));
 
             SegmentWriter w3 = new SegmentWriter(store,
                     new SegmentBufferWriter(store, LATEST_VERSION, "writer3", 0));

Modified: jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentIdFactoryTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentIdFactoryTest.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentIdFactoryTest.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentIdFactoryTest.java Mon May 23 12:59:22 2016
@@ -29,42 +29,43 @@ import org.apache.jackrabbit.oak.segment
 import org.junit.Test;
 
 public class SegmentIdFactoryTest {
-
-    private final SegmentTracker factory;
+    private final SegmentStore store;
+    private final SegmentTracker tracker;
 
     public SegmentIdFactoryTest() throws IOException {
-        factory = new MemoryStore().getTracker();
+        store = new MemoryStore();
+        tracker = store.getTracker();
     }
 
     @Test
     public void segmentIdType() {
-        assertTrue(factory.newDataSegmentId().isDataSegmentId());
-        assertTrue(factory.newBulkSegmentId().isBulkSegmentId());
+        assertTrue(tracker.newDataSegmentId().isDataSegmentId());
+        assertTrue(tracker.newBulkSegmentId().isBulkSegmentId());
 
-        assertFalse(factory.newDataSegmentId().isBulkSegmentId());
-        assertFalse(factory.newBulkSegmentId().isDataSegmentId());
+        assertFalse(tracker.newDataSegmentId().isBulkSegmentId());
+        assertFalse(tracker.newBulkSegmentId().isDataSegmentId());
     }
 
     @Test
     public void internedSegmentIds() {
-        assertTrue(factory.getSegmentId(0, 0) == factory.getSegmentId(0, 0));
-        assertTrue(factory.getSegmentId(1, 2) == factory.getSegmentId(1, 2));
-        assertTrue(factory.getSegmentId(1, 2) != factory.getSegmentId(3, 4));
+        assertTrue(tracker.getSegmentId(0, 0) == tracker.getSegmentId(0, 0));
+        assertTrue(tracker.getSegmentId(1, 2) == tracker.getSegmentId(1, 2));
+        assertTrue(tracker.getSegmentId(1, 2) != tracker.getSegmentId(3, 4));
     }
 
     @Test
     public void referencedSegmentIds() throws InterruptedException {
-        SegmentId a = factory.newDataSegmentId();
-        SegmentId b = factory.newBulkSegmentId();
-        SegmentId c = factory.newDataSegmentId();
+        SegmentId a = tracker.newDataSegmentId();
+        SegmentId b = tracker.newBulkSegmentId();
+        SegmentId c = tracker.newDataSegmentId();
 
-        Set<SegmentId> ids = factory.getReferencedSegmentIds();
+        Set<SegmentId> ids = tracker.getReferencedSegmentIds();
         assertTrue(ids.contains(a));
         assertTrue(ids.contains(b));
         assertTrue(ids.contains(c));
 
         // the returned set is a snapshot in time, not continuously updated
-        assertFalse(ids.contains(factory.newBulkSegmentId()));
+        assertFalse(ids.contains(tracker.newBulkSegmentId()));
     }
 
     /**
@@ -74,8 +75,8 @@ public class SegmentIdFactoryTest {
      */
     // @Test
     public void garbageCollection() {
-        SegmentId a = factory.newDataSegmentId();
-        SegmentId b = factory.newBulkSegmentId();
+        SegmentId a = tracker.newDataSegmentId();
+        SegmentId b = tracker.newBulkSegmentId();
 
         // generate lots of garbage copies of an UUID to get the
         // garbage collector to reclaim also the original instance
@@ -86,7 +87,7 @@ public class SegmentIdFactoryTest {
         System.gc();
 
         // now the original UUID should no longer be present
-        Set<SegmentId> ids = factory.getReferencedSegmentIds();
+        Set<SegmentId> ids = tracker.getReferencedSegmentIds();
         assertFalse(ids.contains(a));
         assertTrue(ids.contains(b));
     }
@@ -101,9 +102,9 @@ public class SegmentIdFactoryTest {
         byte[] buffer = new byte[segment.size()];
         segment.readBytes(Segment.MAX_SEGMENT_SIZE - segment.size(), buffer, 0, segment.size());
 
-        SegmentId id = factory.newDataSegmentId();
+        SegmentId id = tracker.newDataSegmentId();
         ByteBuffer data = ByteBuffer.wrap(buffer);
-        Segment s = new Segment(factory, id, data);
+        Segment s = new Segment(store, id, data);
         s.getRefId(1);
     }
 
@@ -112,9 +113,9 @@ public class SegmentIdFactoryTest {
      */
     @Test(expected = IllegalStateException.class)
     public void bulkAIOOBE() {
-        SegmentId id = factory.newBulkSegmentId();
+        SegmentId id = tracker.newBulkSegmentId();
         ByteBuffer data = ByteBuffer.allocate(4);
-        Segment s = new Segment(factory, id, data);
+        Segment s = new Segment(store, id, data);
         s.getRefId(1);
     }
 

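Note on the SegmentIdFactoryTest changes above: segment ids are still minted by the
SegmentTracker, but a Segment is now constructed against its SegmentStore rather than the
tracker. A minimal sketch combining both, using only calls shown in this diff:

    MemoryStore store = new MemoryStore();
    SegmentTracker tracker = store.getTracker();

    SegmentId bulkId = tracker.newBulkSegmentId();        // ids still come from the tracker
    Segment segment = new Segment(store, bulkId,          // was: new Segment(tracker, id, data)
            ByteBuffer.allocate(4));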
Modified: jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentIdTableBenchmark.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentIdTableBenchmark.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentIdTableBenchmark.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentIdTableBenchmark.java Mon May 23 12:59:22 2016
@@ -22,9 +22,6 @@ import java.io.IOException;
 import java.lang.ref.WeakReference;
 import java.util.Random;
 
-import org.apache.jackrabbit.oak.segment.SegmentId;
-import org.apache.jackrabbit.oak.segment.SegmentIdTable;
-import org.apache.jackrabbit.oak.segment.SegmentTracker;
 import org.apache.jackrabbit.oak.segment.memory.MemoryStore;
 
 public class SegmentIdTableBenchmark {
@@ -49,8 +46,8 @@ public class SegmentIdTableBenchmark {
         }
         
         time = System.currentTimeMillis();
-        SegmentTracker tracker = new MemoryStore().getTracker();
-        final SegmentIdTable tbl = new SegmentIdTable(tracker);
+        MemoryStore store = new MemoryStore();
+        final SegmentIdTable tbl = new SegmentIdTable(store);
         for (int i = 0; i < repeat; i++) {
             for (int j = 0; j < count; j++) {
                 tbl.getSegmentId(j, array[j]);
@@ -60,7 +57,7 @@ public class SegmentIdTableBenchmark {
         System.out.println("SegmentIdTable: " + time);
         
         time = System.currentTimeMillis();
-        ConcurrentTable cm = new ConcurrentTable(tracker, 16 * 1024);
+        ConcurrentTable cm = new ConcurrentTable(store, 16 * 1024);
         for (int i = 0; i < repeat; i++) {
             for (int j = 0; j < count; j++) {
                 cm.getSegmentId(j, array[j]);
@@ -84,11 +81,11 @@ public class SegmentIdTableBenchmark {
     }
     
     static class ConcurrentTable {
-        private final SegmentTracker tracker;
+        private final SegmentStore store;
         volatile WeakReference<SegmentId>[] map;
         @SuppressWarnings("unchecked")
-        ConcurrentTable(SegmentTracker tracker, int size) {
-            this.tracker = tracker;
+        ConcurrentTable(SegmentStore store, int size) {
+            this.store = store;
             map = (WeakReference<SegmentId>[]) new WeakReference[size];
         }
         SegmentId getSegmentId(long a, long b) {
@@ -101,7 +98,7 @@ public class SegmentIdTableBenchmark {
                 while (true) {
                     WeakReference<SegmentId> ref = m[index];
                     if (ref == null) {
-                        SegmentId id = new SegmentId(tracker, a, b);
+                        SegmentId id = new SegmentId(store, a, b);
                         ref = new WeakReference<SegmentId>(id);
                         m[index] = ref;
                         if (m != map) {

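Note: SegmentIdTable and SegmentId follow the same pattern; both are now created from the
SegmentStore instead of a SegmentTracker. A minimal sketch based on the benchmark code above
(the interning behaviour is the one exercised by SegmentIdFactoryTest):

    MemoryStore store = new MemoryStore();
    SegmentIdTable table = new SegmentIdTable(store);     // was: new SegmentIdTable(tracker)
    SegmentId interned = table.getSegmentId(1, 2);        // same instance per (msb, lsb) pair
    SegmentId direct = new SegmentId(store, 3, 4);        // was: new SegmentId(tracker, a, b)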
Modified: jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentIdTableTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentIdTableTest.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentIdTableTest.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentIdTableTest.java Mon May 23 12:59:22 2016
@@ -45,8 +45,8 @@ public class SegmentIdTableTest {
      */
     @Test
     public void endlessSearchLoop() throws IOException {
-        SegmentTracker tracker = new MemoryStore().getTracker();
-        final SegmentIdTable tbl = new SegmentIdTable(tracker);
+        MemoryStore store = new MemoryStore();
+        final SegmentIdTable tbl = new SegmentIdTable(store);
 
         List<SegmentId> refs = new ArrayList<SegmentId>();
         for (int i = 0; i < 1024; i++) {
@@ -75,8 +75,8 @@ public class SegmentIdTableTest {
     
     @Test
     public void randomized() throws IOException {
-        SegmentTracker tracker = new MemoryStore().getTracker();
-        final SegmentIdTable tbl = new SegmentIdTable(tracker);
+        MemoryStore store = new MemoryStore();
+        final SegmentIdTable tbl = new SegmentIdTable(store);
 
         List<SegmentId> refs = new ArrayList<SegmentId>();
         Random r = new Random(1);
@@ -98,8 +98,8 @@ public class SegmentIdTableTest {
     
     @Test
     public void clearTable() throws IOException {
-        SegmentTracker tracker = new MemoryStore().getTracker();
-        final SegmentIdTable tbl = new SegmentIdTable(tracker);
+        MemoryStore store = new MemoryStore();
+        final SegmentIdTable tbl = new SegmentIdTable(store);
 
         List<SegmentId> refs = new ArrayList<SegmentId>();
         int originalCount = 8;
@@ -135,8 +135,8 @@ public class SegmentIdTableTest {
     
     @Test
     public void justHashCollisions() throws IOException {
-        SegmentTracker tracker = new MemoryStore().getTracker();
-        final SegmentIdTable tbl = new SegmentIdTable(tracker);
+        MemoryStore store = new MemoryStore();
+        final SegmentIdTable tbl = new SegmentIdTable(store);
 
         List<SegmentId> refs = new ArrayList<SegmentId>();
         int originalCount = 1024;
@@ -159,8 +159,8 @@ public class SegmentIdTableTest {
     
     @Test
     public void gc() throws IOException {
-        SegmentTracker tracker = new MemoryStore().getTracker();
-        final SegmentIdTable tbl = new SegmentIdTable(tracker);
+        MemoryStore store = new MemoryStore();
+        final SegmentIdTable tbl = new SegmentIdTable(store);
 
         List<SegmentId> refs = new ArrayList<SegmentId>();
         int originalCount = 1024;

Modified: jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentParserTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentParserTest.java?rev=1745182&r1=1745181&r2=1745182&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentParserTest.java (original)
+++ jackrabbit/oak/trunk/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentParserTest.java Mon May 23 12:59:22 2016
@@ -66,7 +66,8 @@ public class SegmentParserTest {
     private static class TestParser extends SegmentParser {
         private final String name;
 
-        private TestParser(String name) {
+        private TestParser(SegmentStore store, String name) {
+            super(store);
             this.name = name;
         }
 
@@ -138,7 +139,7 @@ public class SegmentParserTest {
     @Before
     public void setup() throws IOException {
         store = new MemoryStore();
-        writer = new SegmentWriter(store, new SegmentBufferWriter(store, LATEST_VERSION, ""));
+        writer = new SegmentWriter(store, new SegmentBufferWriter(store, LATEST_VERSION, "", 0));
     }
 
     @After
@@ -149,7 +150,7 @@ public class SegmentParserTest {
     @Test
     public void emptyNode() throws IOException {
         SegmentNodeState node = writer.writeNode(EMPTY_NODE);
-        NodeInfo info = new TestParser("emptyNode") {
+        NodeInfo info = new TestParser(store, "emptyNode") {
             @Override protected void onTemplate(RecordId parentId, RecordId templateId) { }
         }.parseNode(node.getRecordId());
         assertEquals(node.getRecordId(), info.nodeId);
@@ -164,7 +165,7 @@ public class SegmentParserTest {
         NodeBuilder builder = EMPTY_NODE.builder();
         builder.setChildNode("child");
         SegmentNodeState node = writer.writeNode(builder.getNodeState());
-        NodeInfo info = new TestParser("singleChildNode") {
+        NodeInfo info = new TestParser(store, "singleChildNode") {
             @Override protected void onNode(RecordId parentId, RecordId nodeId) { }
             @Override protected void onTemplate(RecordId parentId, RecordId templateId) { }
         }.parseNode(node.getRecordId());
@@ -182,7 +183,7 @@ public class SegmentParserTest {
         builder.setChildNode("two");
         builder.setProperty("three", 42);
         SegmentNodeState node = writer.writeNode(builder.getNodeState());
-        NodeInfo info = new TestParser("node") {
+        NodeInfo info = new TestParser(store, "node") {
             @Override protected void onNode(RecordId parentId, RecordId nodeId) { }
             @Override protected void onTemplate(RecordId parentId, RecordId templateId) { }
             @Override protected void onMap(RecordId parentId, RecordId mapId, MapRecord map) { }
@@ -204,7 +205,7 @@ public class SegmentParserTest {
         builder.setProperty("jcr:primaryType", "type", NAME);
         builder.setProperty("jcr:mixinTypes", ImmutableList.of("type1", "type2"), NAMES);
         SegmentNodeState node = writer.writeNode(builder.getNodeState());
-        NodeInfo nodeInfo = new TestParser("template") {
+        NodeInfo nodeInfo = new TestParser(store, "template") {
             @Override
             protected void onTemplate(RecordId parentId, RecordId templateId) {
                 TemplateInfo info = parseTemplate(templateId);
@@ -228,7 +229,7 @@ public class SegmentParserTest {
     public void emptyMap() throws IOException {
         Map<String, RecordId> empty = newHashMap();
         MapRecord map = writer.writeMap(null, empty);
-        MapInfo mapInfo = new TestParser("emptyMap") {
+        MapInfo mapInfo = new TestParser(store, "emptyMap") {
             @Override protected void onMapLeaf(RecordId parentId, RecordId mapId, MapRecord map) { }
         }.parseMap(null, map.getRecordId(), map);
         assertEquals(map.getRecordId(), mapInfo.mapId);
@@ -241,7 +242,7 @@ public class SegmentParserTest {
         MapRecord base = writer.writeMap(null, createMap(33, rnd));
         MapRecord map = writer.writeMap(base, createMap(1, rnd));
         final AtomicInteger size = new AtomicInteger();
-        MapInfo mapInfo = new TestParser("nonEmptyMap") {
+        MapInfo mapInfo = new TestParser(store, "nonEmptyMap") {
             @Override
             protected void onMapDiff(RecordId parentId, RecordId mapId, MapRecord map) {
                 MapInfo mapInfo = parseMapDiff(mapId, map);
@@ -286,7 +287,7 @@ public class SegmentParserTest {
         NodeBuilder builder = EMPTY_NODE.builder();
         builder.setProperty("p", 1);
         SegmentNodeState node = writer.writeNode(builder.getNodeState());
-        NodeInfo nodeInfo = new TestParser("singleValueProperty") {
+        NodeInfo nodeInfo = new TestParser(store, "singleValueProperty") {
             @Override
             protected void onProperty(RecordId parentId, RecordId propertyId, PropertyTemplate template) {
                 PropertyInfo propertyInfo = parseProperty(parentId, propertyId, template);
@@ -305,7 +306,7 @@ public class SegmentParserTest {
         NodeBuilder builder = EMPTY_NODE.builder();
         builder.setProperty("p", ImmutableList.of(1L, 2L, 3L, 4L), LONGS);
         SegmentNodeState node = writer.writeNode(builder.getNodeState());
-        NodeInfo nodeInfo = new TestParser("multiValueProperty") {
+        NodeInfo nodeInfo = new TestParser(store, "multiValueProperty") {
             @Override
             protected void onProperty(RecordId parentId, RecordId propertyId, PropertyTemplate template) {
                 PropertyInfo propertyInfo = parseProperty(parentId, propertyId, template);
@@ -322,7 +323,7 @@ public class SegmentParserTest {
     @Test
     public void smallBlob() throws IOException {
         SegmentBlob blob = writer.writeBlob(createRandomBlob(4));
-        ValueInfo valueInfo = new TestParser("smallBlob") {
+        ValueInfo valueInfo = new TestParser(store, "smallBlob") {
             @Override
             protected void onBlob(RecordId parentId, RecordId blobId) {
                 BlobInfo blobInfo = parseBlob(blobId);
@@ -338,7 +339,7 @@ public class SegmentParserTest {
     @Test
     public void mediumBlob() throws IOException {
         SegmentBlob blob = writer.writeBlob(createRandomBlob(SMALL_LIMIT));
-        ValueInfo valueInfo = new TestParser("mediumBlob") {
+        ValueInfo valueInfo = new TestParser(store, "mediumBlob") {
             @Override
             protected void onBlob(RecordId parentId, RecordId blobId) {
                 BlobInfo blobInfo = parseBlob(blobId);
@@ -354,7 +355,7 @@ public class SegmentParserTest {
     @Test
     public void longBlob() throws IOException {
         SegmentBlob blob = writer.writeBlob(createRandomBlob(MEDIUM_LIMIT));
-        ValueInfo valueInfo = new TestParser("longBlob") {
+        ValueInfo valueInfo = new TestParser(store, "longBlob") {
             @Override
             protected void onBlob(RecordId parentId, RecordId blobId) {
                 BlobInfo blobInfo = parseBlob(blobId);
@@ -377,7 +378,7 @@ public class SegmentParserTest {
     @Test
     public void shortString() throws IOException {
         RecordId stringId = writer.writeString("short");
-        BlobInfo blobInfo = new TestParser("shortString").parseString(stringId);
+        BlobInfo blobInfo = new TestParser(store, "shortString").parseString(stringId);
         assertEquals(stringId, blobInfo.blobId);
         assertEquals(SMALL, blobInfo.blobType);
         assertEquals(6, blobInfo.size);
@@ -386,7 +387,7 @@ public class SegmentParserTest {
     @Test
     public void mediumString() throws IOException {
         RecordId stringId = writer.writeString(repeat("s", SMALL_LIMIT));
-        BlobInfo blobInfo = new TestParser("mediumString").parseString(stringId);
+        BlobInfo blobInfo = new TestParser(store, "mediumString").parseString(stringId);
         assertEquals(stringId, blobInfo.blobId);
         assertEquals(MEDIUM, blobInfo.blobType);
         assertEquals(SMALL_LIMIT + 2, blobInfo.size);
@@ -395,7 +396,7 @@ public class SegmentParserTest {
     @Test
     public void longString() throws IOException {
         RecordId stringId = writer.writeString(repeat("s", MEDIUM_LIMIT));
-        BlobInfo blobInfo = new TestParser("longString"){
+        BlobInfo blobInfo = new TestParser(store, "longString"){
             @Override protected void onList(RecordId parentId, RecordId listId, int count) { }
         }.parseString(stringId);
         assertEquals(stringId, blobInfo.blobId);
@@ -406,7 +407,7 @@ public class SegmentParserTest {
     @Test
     public void emptyList() {
         RecordId listId = newRecordId(store.getTracker(), new Random());
-        ListInfo listInfo = new TestParser("emptyList").parseList(null, listId, 0);
+        ListInfo listInfo = new TestParser(store, "emptyList").parseList(null, listId, 0);
         assertEquals(listId, listInfo.listId);
         assertEquals(0, listInfo.count);
         assertEquals(0, listInfo.size);
@@ -421,7 +422,7 @@ public class SegmentParserTest {
             list.add(writer.writeString("string " + rnd.nextLong()));
         }
         RecordId listId = writer.writeList(list);
-        ListInfo listInfo = new TestParser("nonEmptyList"){
+        ListInfo listInfo = new TestParser(store, "nonEmptyList"){
             @Override
             protected void onListBucket(RecordId parentId, RecordId listId, int index, int count, int capacity) {
                 parseListBucket(listId, index, count, capacity);