Posted to oak-commits@jackrabbit.apache.org by ma...@apache.org on 2019/08/08 19:10:10 UTC

svn commit: r1864728 - in /jackrabbit/oak/branches/1.10: oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/ oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/ oak-blob-plugins/src/test/java/org...

Author: mattryan
Date: Thu Aug  8 19:10:10 2019
New Revision: 1864728

URL: http://svn.apache.org/viewvc?rev=1864728&view=rev
Log:
OAK-8520: Return existing DataRecord when completeUpload called for existing binary
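
In both backends the fix applies the same guard: finalize the upload only if the
binary does not already exist, and in every case return the record for the blob ID.
A minimal sketch of the pattern (finalizeMultipartUpload() is a hypothetical
stand-in for the backend-specific commit calls shown in the diffs below):

    DataIdentifier blobId = new DataIdentifier(getIdentifierName(key));
    if (!exists(blobId)) {
        if (uploadToken.getUploadId().isPresent()) {
            // An upload ID means this was a multi-part upload whose parts
            // still need to be committed.
            finalizeMultipartUpload(key, uploadToken.getUploadId().get());
        }
        // else do nothing - a single-put upload is already complete
        if (!exists(blobId)) {
            throw new DataRecordUploadException(
                    String.format("Unable to finalize direct write of binary %s", blobId));
        }
    }
    // else the binary already exists; fall through and return its record
    return getRecord(blobId);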

Modified:
    jackrabbit/oak/branches/1.10/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java
    jackrabbit/oak/branches/1.10/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java
    jackrabbit/oak/branches/1.10/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/AbstractDataRecordAccessProviderTest.java

Modified: jackrabbit/oak/branches/1.10/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.10/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java?rev=1864728&r1=1864727&r2=1864728&view=diff
==============================================================================
--- jackrabbit/oak/branches/1.10/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java (original)
+++ jackrabbit/oak/branches/1.10/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java Thu Aug  8 19:10:10 2019
@@ -948,30 +948,33 @@ public class AzureBlobStoreBackend exten
         DataRecordUploadToken uploadToken = DataRecordUploadToken.fromEncodedToken(uploadTokenStr, getOrCreateReferenceKey());
         String key = uploadToken.getBlobId();
         DataIdentifier blobId = new DataIdentifier(getIdentifierName(key));
-        try {
-            if (uploadToken.getUploadId().isPresent()) {
-                // An existing upload ID means this is a multi-part upload
-                CloudBlockBlob blob = getAzureContainer().getBlockBlobReference(key);
-                List<BlockEntry> blocks = blob.downloadBlockList(
-                        BlockListingFilter.UNCOMMITTED,
-                        AccessCondition.generateEmptyCondition(),
-                        null,
-                        null);
-                blob.commitBlockList(blocks);
-            }
-            // else do nothing - single put is already complete
 
-            if (! exists(blobId)) {
+        if (! exists(blobId)) {
+            try {
+                if (uploadToken.getUploadId().isPresent()) {
+                    // An existing upload ID means this is a multi-part upload
+                    CloudBlockBlob blob = getAzureContainer().getBlockBlobReference(key);
+                    List<BlockEntry> blocks = blob.downloadBlockList(
+                            BlockListingFilter.UNCOMMITTED,
+                            AccessCondition.generateEmptyCondition(),
+                            null,
+                            null);
+                    blob.commitBlockList(blocks);
+                }
+                // else do nothing - single put is already complete
+
+                if (!exists(blobId)) {
+                    throw new DataRecordUploadException(
+                            String.format("Unable to finalize direct write of binary %s", blobId));
+                }
+            } catch (URISyntaxException | StorageException e) {
                 throw new DataRecordUploadException(
-                        String.format("Unable to finalize direct write of binary %s", blobId));
+                        String.format("Unable to finalize direct write of binary %s", blobId),
+                        e
+                );
             }
         }
-        catch (URISyntaxException | StorageException e) {
-            throw new DataRecordUploadException(
-                    String.format("Unable to finalize direct write of binary %s", blobId),
-                    e
-            );
-        }
+        // else return the already existing record for this blob ID
 
         return getRecord(blobId);
     }

Modified: jackrabbit/oak/branches/1.10/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.10/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java?rev=1864728&r1=1864727&r2=1864728&view=diff
==============================================================================
--- jackrabbit/oak/branches/1.10/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java (original)
+++ jackrabbit/oak/branches/1.10/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java Thu Aug  8 19:10:10 2019
@@ -906,33 +906,37 @@ public class S3Backend extends AbstractS
 
         DataRecordUploadToken uploadToken = DataRecordUploadToken.fromEncodedToken(uploadTokenStr, getOrCreateReferenceKey());
         String blobId = uploadToken.getBlobId();
-        if (uploadToken.getUploadId().isPresent()) {
-            // An existing upload ID means this is a multi-part upload
-            String uploadId = uploadToken.getUploadId().get();
-            ListPartsRequest listPartsRequest = new ListPartsRequest(bucket, blobId, uploadId);
-            PartListing listing = s3service.listParts(listPartsRequest);
-            List<PartETag> eTags = Lists.newArrayList();
-            for (PartSummary partSummary : listing.getParts()) {
-                PartETag eTag = new PartETag(partSummary.getPartNumber(), partSummary.getETag());
-                eTags.add(eTag);
-            }
+        DataIdentifier dataIdentifier = new DataIdentifier(getIdentifierName(blobId));
 
-            CompleteMultipartUploadRequest completeReq = new CompleteMultipartUploadRequest(
-                    bucket,
-                    blobId,
-                    uploadId,
-                    eTags
-            );
+        if (! exists(dataIdentifier)) {
+            if (uploadToken.getUploadId().isPresent()) {
+                // An existing upload ID means this is a multi-part upload
+                String uploadId = uploadToken.getUploadId().get();
+                ListPartsRequest listPartsRequest = new ListPartsRequest(bucket, blobId, uploadId);
+                PartListing listing = s3service.listParts(listPartsRequest);
+                List<PartETag> eTags = Lists.newArrayList();
+                for (PartSummary partSummary : listing.getParts()) {
+                    PartETag eTag = new PartETag(partSummary.getPartNumber(), partSummary.getETag());
+                    eTags.add(eTag);
+                }
+
+                CompleteMultipartUploadRequest completeReq = new CompleteMultipartUploadRequest(
+                        bucket,
+                        blobId,
+                        uploadId,
+                        eTags
+                );
 
-            s3service.completeMultipartUpload(completeReq);
-        }
-        // else do nothing - single-put upload is already complete
+                s3service.completeMultipartUpload(completeReq);
+            }
+            // else do nothing - single-put upload is already complete
 
 
-        if (! s3service.doesObjectExist(bucket, blobId)) {
-            throw new DataRecordUploadException(
-                    String.format("Unable to finalize direct write of binary %s", blobId)
-            );
+            if (!s3service.doesObjectExist(bucket, blobId)) {
+                throw new DataRecordUploadException(
+                        String.format("Unable to finalize direct write of binary %s", blobId)
+                );
+            }
         }
 
         return getRecord(new DataIdentifier(getIdentifierName(blobId)));
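
The practical effect, as exercised by the new test below: a second call to
completeDataRecordUpload() with the same upload token is now safe and returns
the existing record instead of failing. A caller-side sketch, assuming the
content has already been transferred to the upload URI (ds, ONE_MB, randomStream,
and doHttpsUpload are as in the test):

    DataRecordUpload upload = ds.initiateDataRecordUpload(ONE_MB, 1);
    doHttpsUpload(randomStream(0, ONE_MB), ONE_MB, upload.getUploadURIs().iterator().next());
    DataRecord first = ds.completeDataRecordUpload(upload.getUploadToken());
    // A retry (e.g. after a lost response) now yields the same binary.
    DataRecord second = ds.completeDataRecordUpload(upload.getUploadToken());
    assert first.getIdentifier().equals(second.getIdentifier());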

Modified: jackrabbit/oak/branches/1.10/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/AbstractDataRecordAccessProviderTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/branches/1.10/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/AbstractDataRecordAccessProviderTest.java?rev=1864728&r1=1864727&r2=1864728&view=diff
==============================================================================
--- jackrabbit/oak/branches/1.10/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/AbstractDataRecordAccessProviderTest.java (original)
+++ jackrabbit/oak/branches/1.10/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/directaccess/AbstractDataRecordAccessProviderTest.java Thu Aug  8 19:10:10 2019
@@ -29,9 +29,11 @@ import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.StringWriter;
 import java.io.UnsupportedEncodingException;
 import java.net.URI;
 import java.net.URLDecoder;
+import java.nio.charset.Charset;
 import java.util.Arrays;
 import java.util.Map;
 
@@ -488,6 +490,35 @@ public abstract class AbstractDataRecord
     }
 
     @Test
+    public void testCompleteAlreadyUploadedBinaryReturnsSameBinaryIT() throws DataStoreException, DataRecordUploadException, IOException {
+        DataRecordAccessProvider ds = getDataStore();
+        DataRecord uploadedRecord = null;
+        try {
+            DataRecordUpload uploadContext = ds.initiateDataRecordUpload(ONE_MB, 1);
+            InputStream uploadStream = randomStream(0, ONE_MB);
+            URI uploadURI = uploadContext.getUploadURIs().iterator().next();
+            doHttpsUpload(uploadStream, ONE_MB, uploadURI);
+            uploadedRecord = ds.completeDataRecordUpload(uploadContext.getUploadToken());
+            assertEquals(ONE_MB, uploadedRecord.getLength());
+
+            DataRecord secondRecord = ds.completeDataRecordUpload(uploadContext.getUploadToken());
+
+            assertEquals(uploadedRecord.getIdentifier(), secondRecord.getIdentifier());
+            assertEquals(uploadedRecord.getLength(), secondRecord.getLength());
+            StringWriter original = new StringWriter();
+            IOUtils.copy(uploadedRecord.getStream(), original, Charset.forName("UTF-8"));
+            StringWriter second = new StringWriter();
+            IOUtils.copy(secondRecord.getStream(), second, Charset.forName("UTF-8"));
+            assertEquals(original.toString(), second.toString());
+        }
+        finally {
+            if (null != uploadedRecord) {
+                doDeleteRecord((DataStore) ds, uploadedRecord.getIdentifier());
+            }
+        }
+    }
+
+    @Test
     public void testSinglePutDirectUploadIT() throws DataRecordUploadException, DataStoreException, IOException {
         DataRecordAccessProvider ds = getDataStore();
         for (InitUploadResult res : Lists.newArrayList(