You are viewing a plain text version of this content. The canonical link for it is here.
Posted to oak-commits@jackrabbit.apache.org by al...@apache.org on 2015/12/02 15:30:10 UTC

svn commit: r1717636 - /jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java

Author: alexparvulescu
Date: Wed Dec  2 14:30:10 2015
New Revision: 1717636

URL: http://svn.apache.org/viewvc?rev=1717636&view=rev
Log:
OAK-3715: SegmentWriter — reduce buffer size for reading binaries


Modified:
    jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java

Modified: jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java?rev=1717636&r1=1717635&r2=1717636&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java (original)
+++ jackrabbit/oak/trunk/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java Wed Dec  2 14:30:10 2015
@@ -61,6 +61,7 @@ import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.SequenceInputStream;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -492,7 +493,7 @@ public class SegmentWriter {
     private RecordId internalWriteStream(InputStream stream)
             throws IOException {
         BlobStore blobStore = store.getBlobStore();
-        byte[] data = new byte[MAX_SEGMENT_SIZE];
+        byte[] data = new byte[Segment.MEDIUM_LIMIT];
         int n = read(stream, data, 0, data.length);
 
         // Special case for short binaries (up to about 16kB):
@@ -505,6 +506,8 @@ public class SegmentWriter {
             return writeBlobId(blobId);
         }
 
+        data = Arrays.copyOf(data, MAX_SEGMENT_SIZE);
+        n += read(stream, data, n, MAX_SEGMENT_SIZE - n);
         long length = n;
         List<RecordId> blockIds =
                 newArrayListWithExpectedSize(2 * n / BLOCK_SIZE);