You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ozone.apache.org by el...@apache.org on 2021/02/08 15:41:14 UTC
[ozone] branch master updated: HDDS-1997. Support copy-source-if-(un)modified-since headers for MPU … (#1817)
This is an automated email from the ASF dual-hosted git repository.
elek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new 4729fe1 HDDS-1997. Support copy-source-if-(un)modified-since headers for MPU … (#1817)
4729fe1 is described below
commit 4729fe170150a194b4d412b319310dfed770d987
Author: Symious <yi...@foxmail.com>
AuthorDate: Mon Feb 8 23:40:57 2021 +0800
HDDS-1997. Support copy-source-if-(un)modified-since headers for MPU … (#1817)
---
.../src/main/smoketest/s3/MultipartUpload.robot | 39 ++++++++++
.../hadoop/ozone/s3/endpoint/ObjectEndpoint.java | 47 +++++++++++
.../hadoop/ozone/s3/exception/S3ErrorTable.java | 5 ++
.../org/apache/hadoop/ozone/s3/util/S3Consts.java | 7 ++
.../s3/endpoint/TestMultipartUploadWithCopy.java | 90 +++++++++++++++++++++-
5 files changed, 187 insertions(+), 1 deletion(-)
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/MultipartUpload.robot b/hadoop-ozone/dist/src/main/smoketest/s3/MultipartUpload.robot
index e05a706..b9d99f2 100644
--- a/hadoop-ozone/dist/src/main/smoketest/s3/MultipartUpload.robot
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/MultipartUpload.robot
@@ -17,6 +17,7 @@
Documentation S3 gateway test with aws cli
Library OperatingSystem
Library String
+Library DateTime
Resource ../commonlib.robot
Resource commonawslib.robot
Test Timeout 5 minutes
@@ -274,6 +275,44 @@ Test Multipart Upload Put With Copy and range
Compare files /tmp/part1 /tmp/part-result
+Test Multipart Upload Put With Copy and range with IfModifiedSince
+    # The source object is created "now"; beforeCreate/afterCreate bracket its
+    # modification time by one day on each side so the conditional headers can
+    # be driven to both the success and the failure path deterministically.
+ Run Keyword Create Random file 10
+ ${curDate} = Get Current Date
+ ${beforeCreate} = Subtract Time From Date ${curDate} 1 day
+ ${afterCreate} = Add Time To Date ${curDate} 1 day
+
+ ${result} = Execute AWSS3APICli put-object --bucket ${BUCKET} --key ${PREFIX}/copyrange/source --body /tmp/part1
+
+ ${result} = Execute AWSS3APICli create-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/copyrange/destination
+
+ ${uploadID} = Execute and checkrc echo '${result}' | jq -r '.UploadId' 0
+ Should contain ${result} ${BUCKET}
+ Should contain ${result} UploadId
+
+    # if-modified-since a future date: the source was NOT modified after it,
+    # so the CLI must exit non-zero (255) with PreconditionFailed.
+ ${result} = Execute AWSS3APICli and checkrc upload-part-copy --bucket ${BUCKET} --key ${PREFIX}/copyrange/destination --upload-id ${uploadID} --part-number 1 --copy-source ${BUCKET}/${PREFIX}/copyrange/source --copy-source-range bytes=0-10485757 --copy-source-if-modified-since '${afterCreate}' 255
+ Should contain ${result} PreconditionFailed
+
+    # if-unmodified-since a past date: the source WAS modified after it,
+    # so this must also fail the precondition.
+ ${result} = Execute AWSS3APICli and checkrc upload-part-copy --bucket ${BUCKET} --key ${PREFIX}/copyrange/destination --upload-id ${uploadID} --part-number 2 --copy-source ${BUCKET}/${PREFIX}/copyrange/source --copy-source-range bytes=10485758-10485759 --copy-source-if-unmodified-since '${beforeCreate}' 255
+ Should contain ${result} PreconditionFailed
+
+    # if-modified-since a past date: the precondition holds, copy succeeds (rc 0).
+ ${result} = Execute AWSS3APICli and checkrc upload-part-copy --bucket ${BUCKET} --key ${PREFIX}/copyrange/destination --upload-id ${uploadID} --part-number 1 --copy-source ${BUCKET}/${PREFIX}/copyrange/source --copy-source-range bytes=0-10485757 --copy-source-if-modified-since '${beforeCreate}' 0
+ Should contain ${result} ETag
+ Should contain ${result} LastModified
+
+ ${eTag1} = Execute and checkrc echo '${result}' | jq -r '.CopyPartResult.ETag' 0
+
+    # if-unmodified-since a future date: the precondition holds, copy succeeds.
+ ${result} = Execute AWSS3APICli and checkrc upload-part-copy --bucket ${BUCKET} --key ${PREFIX}/copyrange/destination --upload-id ${uploadID} --part-number 2 --copy-source ${BUCKET}/${PREFIX}/copyrange/source --copy-source-range bytes=10485758-10485759 --copy-source-if-unmodified-since '${afterCreate}' 0
+ Should contain ${result} ETag
+ Should contain ${result} LastModified
+
+ ${eTag2} = Execute and checkrc echo '${result}' | jq -r '.CopyPartResult.ETag' 0
+
+
+    # The two successful parts together must reassemble the original file.
+ Execute AWSS3APICli complete-multipart-upload --upload-id ${uploadID} --bucket ${BUCKET} --key ${PREFIX}/copyrange/destination --multipart-upload 'Parts=[{ETag=${eTag1},PartNumber=1},{ETag=${eTag2},PartNumber=2}]'
+ Execute AWSS3APICli get-object --bucket ${BUCKET} --key ${PREFIX}/copyrange/destination /tmp/part-result
+
+ Compare files /tmp/part1 /tmp/part-result
+
Test Multipart Upload list
${result} = Execute AWSS3APICli create-multipart-upload --bucket ${BUCKET} --key ${PREFIX}/listtest/key1
${uploadID1} = Execute and checkrc echo '${result}' | jq -r '.UploadId' 0
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 6b4efb7..27e0ef3 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -41,6 +41,7 @@ import javax.ws.rs.core.StreamingOutput;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
+import java.text.ParseException;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
@@ -89,10 +90,13 @@ import static org.apache.hadoop.ozone.s3.S3GatewayConfigKeys.OZONE_S3G_CLIENT_BU
import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.ENTITY_TOO_SMALL;
import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.INVALID_REQUEST;
import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.NO_SUCH_UPLOAD;
+import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.PRECOND_FAILED;
import static org.apache.hadoop.ozone.s3.util.S3Consts.ACCEPT_RANGE_HEADER;
import static org.apache.hadoop.ozone.s3.util.S3Consts.CONTENT_RANGE_HEADER;
import static org.apache.hadoop.ozone.s3.util.S3Consts.COPY_SOURCE_HEADER;
import static org.apache.hadoop.ozone.s3.util.S3Consts.COPY_SOURCE_HEADER_RANGE;
+import static org.apache.hadoop.ozone.s3.util.S3Consts.COPY_SOURCE_IF_MODIFIED_SINCE;
+import static org.apache.hadoop.ozone.s3.util.S3Consts.COPY_SOURCE_IF_UNMODIFIED_SINCE;
import static org.apache.hadoop.ozone.s3.util.S3Consts.RANGE_HEADER;
import static org.apache.hadoop.ozone.s3.util.S3Consts.RANGE_HEADER_SUPPORTED_UNIT;
import static org.apache.hadoop.ozone.s3.util.S3Consts.STORAGE_CLASS_HEADER;
@@ -584,6 +588,18 @@ public class ObjectEndpoint extends EndpointBase {
String sourceBucket = result.getLeft();
String sourceKey = result.getRight();
+ Long sourceKeyModificationTime = getBucket(sourceBucket).
+ getKey(sourceKey).getModificationTime().toEpochMilli();
+ String copySourceIfModifiedSince =
+ headers.getHeaderString(COPY_SOURCE_IF_MODIFIED_SINCE);
+ String copySourceIfUnmodifiedSince =
+ headers.getHeaderString(COPY_SOURCE_IF_UNMODIFIED_SINCE);
+ if (!checkCopySourceModificationTime(sourceKeyModificationTime,
+ copySourceIfModifiedSince, copySourceIfUnmodifiedSince)) {
+ throw S3ErrorTable.newError(PRECOND_FAILED,
+ sourceBucket + "/" + sourceKey);
+ }
+
try (OzoneInputStream sourceObject =
getBucket(sourceBucket).readKey(sourceKey)) {
@@ -834,4 +850,35 @@ public class ObjectEndpoint extends EndpointBase {
}
return partMarker;
}
+
+ /**
+  * Parses a date string taken from a copy-source conditional header into
+  * epoch milliseconds.
+  *
+  * @param ozoneDateStr the raw header value to parse
+  * @return the parsed time in milliseconds since the epoch
+  * @throws OS3Exception INVALID_ARGUMENT if the string is not a valid date
+  */
+ private static long parseOzoneDate(String ozoneDateStr) throws OS3Exception {
+ long ozoneDateInMs;
+ try {
+ // Despite its name, OzoneUtils.formatDate parses the string and
+ // returns a long timestamp (it is assigned to a long here).
+ ozoneDateInMs = OzoneUtils.formatDate(ozoneDateStr);
+ } catch (ParseException e) {
+ // Malformed dates are surfaced to the client as S3 InvalidArgument.
+ throw S3ErrorTable.newError(S3ErrorTable
+ .INVALID_ARGUMENT, ozoneDateStr);
+ }
+ return ozoneDateInMs;
+ }
+
+ /**
+  * Evaluates the x-amz-copy-source-if-modified-since /
+  * x-amz-copy-source-if-unmodified-since preconditions against the source
+  * key's last-modification time. A null header string means "no constraint"
+  * (the corresponding bound defaults to Long.MIN_VALUE / Long.MAX_VALUE).
+  *
+  * @param lastModificationTime source key modification time in epoch millis
+  * @param copySourceIfModifiedSinceStr raw if-modified-since header, or null
+  * @param copySourceIfUnmodifiedSinceStr raw if-unmodified-since header, or null
+  * @return true if both preconditions hold; false triggers PreconditionFailed
+  * @throws OS3Exception if either header value cannot be parsed as a date
+  */
+ private boolean checkCopySourceModificationTime(Long lastModificationTime,
+ String copySourceIfModifiedSinceStr,
+ String copySourceIfUnmodifiedSinceStr) throws OS3Exception {
+ long copySourceIfModifiedSince = Long.MIN_VALUE;
+ long copySourceIfUnmodifiedSince = Long.MAX_VALUE;
+
+ if (copySourceIfModifiedSinceStr != null) {
+ copySourceIfModifiedSince =
+ parseOzoneDate(copySourceIfModifiedSinceStr);
+ }
+
+ if (copySourceIfUnmodifiedSinceStr != null) {
+ copySourceIfUnmodifiedSince =
+ parseOzoneDate(copySourceIfUnmodifiedSinceStr);
+ }
+
+ // NOTE(review): both comparisons are inclusive, so a modification time
+ // exactly equal to the if-modified-since value passes. HTTP/S3
+ // if-modified-since semantics are usually strict ("modified AFTER the
+ // given time") — confirm this matches AWS behavior for UploadPartCopy.
+ return (copySourceIfModifiedSince <= lastModificationTime) &&
+ (lastModificationTime <= copySourceIfUnmodifiedSince);
+ }
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
index 13eac13..9e795a7 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
@@ -24,6 +24,7 @@ import static java.net.HttpURLConnection.HTTP_BAD_REQUEST;
import static java.net.HttpURLConnection.HTTP_CONFLICT;
import static java.net.HttpURLConnection.HTTP_FORBIDDEN;
import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
+import static java.net.HttpURLConnection.HTTP_PRECON_FAILED;
import static java.net.HttpURLConnection.HTTP_SERVER_ERROR;
import static org.apache.hadoop.ozone.s3.util.S3Consts.RANGE_NOT_SATISFIABLE;
@@ -110,6 +111,10 @@ public final class S3ErrorTable {
"AccessDenied", "User doesn't have the right to access this " +
"resource.", HTTP_FORBIDDEN);
+ /**
+  * S3 "PreconditionFailed" error (HTTP 412), returned when a conditional
+  * request header such as x-amz-copy-source-if-(un)modified-since does not
+  * hold for the target object.
+  */
+ public static final OS3Exception PRECOND_FAILED = new OS3Exception(
+ "PreconditionFailed", "At least one of the pre-conditions you " +
+ "specified did not hold", HTTP_PRECON_FAILED);
+
/**
* Create a new instance of Error.
* @param e Error Template
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Consts.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Consts.java
index 9f9440b..f891e13 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Consts.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Consts.java
@@ -40,6 +40,13 @@ public final class S3Consts {
public static final String ENCODING_TYPE = "url";
// Constants related to Range Header
+ // Constants related to the x-amz-copy-source-if-* conditional headers
+ // (without this heading these constants would sit under the pre-existing
+ // "Range Header" comment above, which does not describe them).
+ public static final String COPY_SOURCE_IF_PREFIX = "x-amz-copy-source-if-";
+ public static final String COPY_SOURCE_IF_MODIFIED_SINCE =
+ COPY_SOURCE_IF_PREFIX + "modified-since";
+ public static final String COPY_SOURCE_IF_UNMODIFIED_SINCE =
+ COPY_SOURCE_IF_PREFIX + "unmodified-since";
+
+ // Constants related to Range Header
public static final String RANGE_HEADER_SUPPORTED_UNIT = "bytes";
public static final String RANGE_HEADER = "Range";
public static final String ACCEPT_RANGE_HEADER = "Accept-Ranges";
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestMultipartUploadWithCopy.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestMultipartUploadWithCopy.java
index 17911c5..29f6dcf 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestMultipartUploadWithCopy.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestMultipartUploadWithCopy.java
@@ -44,13 +44,20 @@ import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.hadoop.ozone.s3.util.S3Consts.COPY_SOURCE_HEADER;
import static org.apache.hadoop.ozone.s3.util.S3Consts.COPY_SOURCE_HEADER_RANGE;
+import static org.apache.hadoop.ozone.s3.util.S3Consts.COPY_SOURCE_IF_MODIFIED_SINCE;
+import static org.apache.hadoop.ozone.s3.util.S3Consts.COPY_SOURCE_IF_UNMODIFIED_SINCE;
import static org.apache.hadoop.ozone.s3.util.S3Consts.STORAGE_CLASS_HEADER;
+
+import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
+import org.apache.hadoop.ozone.web.utils.OzoneUtils;
import org.junit.Assert;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
+
+import static org.junit.Assert.fail;
import static org.mockito.Mockito.when;
/**
@@ -92,6 +99,14 @@ public class TestMultipartUploadWithCopy {
@Test
public void testMultipart() throws Exception {
+ Long sourceKeyLastModificationTime = CLIENT.getObjectStore()
+ .getS3Bucket(OzoneConsts.S3_BUCKET)
+ .getKey(EXISTING_KEY)
+ .getModificationTime().toEpochMilli();
+ String beforeSourceKeyModificationTimeStr =
+ OzoneUtils.formatTime(sourceKeyLastModificationTime - 1000);
+ String afterSourceKeyModificationTimeStr =
+ OzoneUtils.formatTime(sourceKeyLastModificationTime + 1000);
// Initiate multipart upload
String uploadID = initiateMultipartUpload(KEY);
@@ -114,6 +129,14 @@ public class TestMultipartUploadWithCopy {
OzoneConsts.S3_BUCKET + "/" + EXISTING_KEY, "bytes=0-3");
partsList.add(part3);
+ Part part4 =
+ uploadPartWithCopy(KEY, uploadID, 3,
+ OzoneConsts.S3_BUCKET + "/" + EXISTING_KEY, "bytes=0-3",
+ beforeSourceKeyModificationTimeStr,
+ afterSourceKeyModificationTimeStr
+ );
+ partsList.add(part4);
+
// complete multipart upload
CompleteMultipartUploadRequest completeMultipartUploadRequest = new
CompleteMultipartUploadRequest();
@@ -133,6 +156,58 @@ public class TestMultipartUploadWithCopy {
}
}
+ @Test
+ // Verifies that upload-part-copy fails with PreconditionFailed when the
+ // copy-source conditional headers do not hold. Timestamps 1s before/after
+ // the source key's modification time exercise both bounds.
+ public void testMultipartIfModifiedSince() throws Exception {
+ Long sourceKeyLastModificationTime = CLIENT.getObjectStore()
+ .getS3Bucket(OzoneConsts.S3_BUCKET)
+ .getKey(EXISTING_KEY)
+ .getModificationTime().toEpochMilli();
+ String beforeSourceKeyModificationTimeStr =
+ OzoneUtils.formatTime(sourceKeyLastModificationTime - 1000);
+ String afterSourceKeyModificationTimeStr =
+ OzoneUtils.formatTime(sourceKeyLastModificationTime + 1000);
+
+ // Initiate multipart upload
+ String uploadID = initiateMultipartUpload(KEY);
+
+ // Both conditions violated: ifModifiedSince is AFTER the modification
+ // time and ifUnmodifiedSince is BEFORE it -> must fail.
+ // ifModifiedSince = afterSourceKeyModificationTime,
+ // ifUnmodifiedSince = beforeSourceKeyModificationTime
+ try {
+ uploadPartWithCopy(KEY, uploadID, 1,
+ OzoneConsts.S3_BUCKET + "/" + EXISTING_KEY, "bytes=0-3",
+ afterSourceKeyModificationTimeStr,
+ beforeSourceKeyModificationTimeStr
+ );
+ fail("testMultipartIfModifiedSinceError");
+ } catch (OS3Exception ex) {
+ // NOTE(review): assertEquals arguments are (actual, expected) here;
+ // JUnit convention is (expected, actual). Harmless for the pass/fail
+ // result but swaps the roles in the failure message.
+ assertEquals(ex.getCode(), S3ErrorTable.PRECOND_FAILED.getCode());
+ }
+
+ // Only ifUnmodifiedSince = beforeSourceKeyModificationTime: the key was
+ // modified after that time -> must fail.
+ try {
+ uploadPartWithCopy(KEY, uploadID, 1,
+ OzoneConsts.S3_BUCKET + "/" + EXISTING_KEY, "bytes=0-3",
+ null,
+ beforeSourceKeyModificationTimeStr
+ );
+ fail("testMultipartIfModifiedSinceError");
+ } catch (OS3Exception ex) {
+ assertEquals(ex.getCode(), S3ErrorTable.PRECOND_FAILED.getCode());
+ }
+
+ // Only ifModifiedSince = afterSourceKeyModificationTime: the key was not
+ // modified after that time -> must fail.
+ try {
+ uploadPartWithCopy(KEY, uploadID, 1,
+ OzoneConsts.S3_BUCKET + "/" + EXISTING_KEY, "bytes=0-3",
+ afterSourceKeyModificationTimeStr,
+ null
+ );
+ fail("testMultipartIfModifiedSinceError");
+ } catch (OS3Exception ex) {
+ assertEquals(ex.getCode(), S3ErrorTable.PRECOND_FAILED.getCode());
+ }
+ }
+
private String initiateMultipartUpload(String key) throws IOException,
OS3Exception {
setHeaders();
@@ -167,11 +242,24 @@ public class TestMultipartUploadWithCopy {
+ // Convenience overload: part-copy without conditional headers; delegates
+ // to the seven-argument variant with both conditions disabled (null).
private Part uploadPartWithCopy(String key, String uploadID, int partNumber,
String keyOrigin, String range) throws IOException, OS3Exception {
+ return uploadPartWithCopy(key, uploadID, partNumber, keyOrigin,
+ range, null, null);
+ }
+
+ private Part uploadPartWithCopy(String key, String uploadID, int partNumber,
+ String keyOrigin, String range, String ifModifiedSinceStr,
+ String ifUnmodifiedSinceStr) throws IOException, OS3Exception {
Map<String, String> additionalHeaders = new HashMap<>();
additionalHeaders.put(COPY_SOURCE_HEADER, keyOrigin);
if (range != null) {
additionalHeaders.put(COPY_SOURCE_HEADER_RANGE, range);
-
+ }
+ if (ifModifiedSinceStr != null) {
+ additionalHeaders.put(COPY_SOURCE_IF_MODIFIED_SINCE, ifModifiedSinceStr);
+ }
+ if (ifUnmodifiedSinceStr != null) {
+ additionalHeaders.put(COPY_SOURCE_IF_UNMODIFIED_SINCE,
+ ifUnmodifiedSinceStr);
}
setHeaders(additionalHeaders);
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@ozone.apache.org
For additional commands, e-mail: commits-help@ozone.apache.org