Posted to commits@ozone.apache.org by ad...@apache.org on 2022/05/02 17:46:45 UTC

[ozone] branch master updated: HDDS-6529. Adding Unit-Test cases for S3-Gateway Object-Endpoint Metrics (#3302)

This is an automated email from the ASF dual-hosted git repository.

adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new d53cb2c1b7 HDDS-6529. Adding Unit-Test cases for S3-Gateway Object-Endpoint Metrics (#3302)
d53cb2c1b7 is described below

commit d53cb2c1b76d07d1ef16a1b616abd010f00049f2
Author: Arafat2198 <98...@users.noreply.github.com>
AuthorDate: Mon May 2 23:16:38 2022 +0530

    HDDS-6529. Adding Unit-Test cases for S3-Gateway Object-Endpoint Metrics (#3302)
---
 .../hadoop/ozone/s3/endpoint/ObjectEndpoint.java   |  19 +-
 .../hadoop/ozone/s3/metrics/S3GatewayMetrics.java  |  80 +++++
 .../ozone/s3/metrics/TestS3GatewayMetrics.java     | 367 +++++++++++++++++++--
 3 files changed, 436 insertions(+), 30 deletions(-)
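
The pattern applied throughout ObjectEndpoint.java below is to keep the existing audit logging and add a matching failure-counter increment in each catch block before the exception is re-thrown. A condensed, self-contained sketch of that accounting pattern follows; MiniEndpoint and MiniMetrics are hypothetical stand-ins for ObjectEndpoint and S3GatewayMetrics, which are presumably backed by Hadoop metrics2 counters rather than a bare AtomicLong.

    import java.util.concurrent.atomic.AtomicLong;

    public class MiniEndpoint {

      // Hypothetical stand-in for S3GatewayMetrics: one success/failure pair.
      static final class MiniMetrics {
        final AtomicLong createKeySuccess = new AtomicLong();
        final AtomicLong createKeyFailure = new AtomicLong();
      }

      private final MiniMetrics metrics = new MiniMetrics();

      MiniMetrics getMetrics() {
        return metrics;
      }

      // Stand-in for ObjectEndpoint.put(): count success on the happy path,
      // count failure in the catch block, and still propagate the exception.
      void put(String bucket, String key) throws Exception {
        try {
          if (bucket == null) {
            throw new Exception("no such bucket");
          }
          getMetrics().createKeySuccess.incrementAndGet();
        } catch (Exception ex) {
          getMetrics().createKeyFailure.incrementAndGet();
          throw ex;
        }
      }

      public static void main(String[] args) throws Exception {
        MiniEndpoint endpoint = new MiniEndpoint();
        endpoint.put("bucket1", "key1");
        try {
          endpoint.put(null, "key1");
        } catch (Exception expected) {
          // ignored: the failure counter is what we inspect
        }
        // Prints "success=1 failure=1".
        System.out.println("success=" + endpoint.getMetrics().createKeySuccess.get()
            + " failure=" + endpoint.getMetrics().createKeyFailure.get());
      }
    }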

diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 6f4aff7d8b..d917fdc113 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -236,8 +236,14 @@ public class ObjectEndpoint extends EndpointBase {
       throw ex;
     } catch (Exception ex) {
       auditSuccess = false;
-      AUDIT.logWriteFailure(
-          buildAuditMessageForFailure(s3GAction, getAuditParameters(), ex));
+      AUDIT.logWriteFailure(buildAuditMessageForFailure(s3GAction,
+          getAuditParameters(), ex));
+      if (copyHeader != null) {
+        getMetrics().incCopyObjectFailure();
+      } else {
+        getMetrics().incCreateKeyFailure();
+      }
+      LOG.error("Exception occurred in PutObject", ex.getMessage());
       throw ex;
     } finally {
       if (auditSuccess) {
@@ -516,6 +522,11 @@ public class ObjectEndpoint extends EndpointBase {
     } catch (Exception ex) {
       AUDIT.logWriteFailure(
           buildAuditMessageForFailure(s3GAction, getAuditParameters(), ex));
+      if (uploadId != null && !uploadId.equals("")) {
+        getMetrics().incAbortMultiPartUploadFailure();
+      } else {
+        getMetrics().incDeleteKeyFailure();
+      }
       throw ex;
     }
     getMetrics().incDeleteKeySuccess();
@@ -576,6 +587,7 @@ public class ObjectEndpoint extends EndpointBase {
     } catch (Exception ex) {
       AUDIT.logWriteFailure(
           buildAuditMessageForFailure(s3GAction, getAuditParameters(), ex));
+      getMetrics().incInitMultiPartUploadFailure();
       throw ex;
     }
   }
@@ -809,6 +821,7 @@ public class ObjectEndpoint extends EndpointBase {
       });
 
     } catch (OMException ex) {
+      getMetrics().incListPartsFailure();
       if (ex.getResult() == ResultCodes.NO_SUCH_MULTIPART_UPLOAD_ERROR) {
         throw newError(NO_SUCH_UPLOAD, uploadID, ex);
       } else if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
@@ -970,7 +983,7 @@ public class ObjectEndpoint extends EndpointBase {
     }
 
     long currentDate = System.currentTimeMillis();
-    if  (ozoneDateInMs <= currentDate) {
+    if (ozoneDateInMs <= currentDate) {
       return OptionalLong.of(ozoneDateInMs);
     } else {
       // dates in the future are invalid, so return empty()
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/metrics/S3GatewayMetrics.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/metrics/S3GatewayMetrics.java
index b8b4ce7124..b978751f3d 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/metrics/S3GatewayMetrics.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/metrics/S3GatewayMetrics.java
@@ -365,4 +365,84 @@ public final class S3GatewayMetrics implements MetricsSource {
   public long getListMultipartUploadsFailure() {
     return listMultipartUploadsFailure.value();
   }
+
+  public long getCreateMultipartKeySuccess() {
+    return createMultipartKeySuccess.value();
+  }
+
+  public long getCreateMultipartKeyFailure() {
+    return createMultipartKeyFailure.value();
+  }
+
+  public long getCompleteMultiPartUploadSuccess() {
+    return completeMultiPartUploadSuccess.value();
+  }
+
+  public long getCompleteMultiPartUploadFailure() {
+    return completeMultiPartUploadFailure.value();
+  }
+
+  public long getListPartsSuccess() {
+    return listPartsSuccess.value();
+  }
+
+  public long getListPartsFailure() {
+    return listPartsFailure.value();
+  }
+
+  public long getCopyObjectSuccess() {
+    return copyObjectSuccess.value();
+  }
+
+  public long getCopyObjectFailure() {
+    return copyObjectFailure.value();
+  }
+
+  public long getCreateKeyFailure() {
+    return createKeyFailure.value();
+  }
+
+  public long getCreateKeySuccess() {
+    return createKeySuccess.value();
+  }
+
+  public long getInitMultiPartUploadSuccess() {
+    return initMultiPartUploadSuccess.value();
+  }
+
+  public long getInitMultiPartUploadFailure() {
+    return initMultiPartUploadFailure.value();
+  }
+
+  public long getDeleteKeySuccess() {
+    return deleteKeySuccess.value();
+  }
+
+  public long getDeleteKeyFailure() {
+    return deleteKeyFailure.value();
+  }
+
+  public long getGetKeyFailure() {
+    return getKeyFailure.value();
+  }
+
+  public long getGetKeySuccess() {
+    return getKeySuccess.value();
+  }
+
+  public long getAbortMultiPartUploadSuccess() {
+    return abortMultiPartUploadSuccess.value();
+  }
+
+  public long getAbortMultiPartUploadFailure() {
+    return abortMultiPartUploadFailure.value();
+  }
+
+  public long getHeadKeyFailure() {
+    return headKeyFailure.value();
+  }
+
+  public long getListS3BucketsFailure() {
+    return listS3BucketsFailure.value();
+  }
 }
\ No newline at end of file
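
The new tests in TestS3GatewayMetrics.java below all follow the same delta-assertion idiom: snapshot the counter, perform exactly one operation, snapshot it again, and assert the difference is 1 so the test does not depend on counts left behind by earlier tests. A minimal stand-alone sketch of that idiom is shown here; DeltaAssertionSketchTest and its AtomicLong counter are hypothetical stand-ins, whereas the real tests read the S3GatewayMetrics getters added above and call the actual endpoints.

    import static org.junit.Assert.assertEquals;

    import java.util.concurrent.atomic.AtomicLong;
    import org.junit.Test;

    public class DeltaAssertionSketchTest {

      // Hypothetical stand-in for one S3GatewayMetrics counter and its getter.
      private final AtomicLong createKeySuccess = new AtomicLong();

      private long getCreateKeySuccess() {
        return createKeySuccess.get();
      }

      // Stand-in for a successful ObjectEndpoint.put() call.
      private void putKey() {
        createKeySuccess.incrementAndGet();
      }

      @Test
      public void testCreateKeySuccessDelta() {
        long oriMetric = getCreateKeySuccess();  // counter before the operation
        putKey();                                // exactly one operation
        long curMetric = getCreateKeySuccess();  // counter after the operation
        assertEquals(1L, curMetric - oriMetric); // exactly one increment recorded
      }
    }
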
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/metrics/TestS3GatewayMetrics.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/metrics/TestS3GatewayMetrics.java
index ccf36a70c0..9ce6a95001 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/metrics/TestS3GatewayMetrics.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/metrics/TestS3GatewayMetrics.java
@@ -19,35 +19,40 @@
  */
 package org.apache.hadoop.ozone.s3.metrics;
 
-import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.hadoop.hdds.client.ReplicationFactor;
-import org.apache.hadoop.hdds.client.ReplicationType;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.client.OzoneBucket;
 import org.apache.hadoop.ozone.client.OzoneClient;
 import org.apache.hadoop.ozone.client.OzoneClientStub;
-import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
 import org.apache.hadoop.ozone.s3.endpoint.BucketEndpoint;
 import org.apache.hadoop.ozone.s3.endpoint.ObjectEndpoint;
 import org.apache.hadoop.ozone.s3.endpoint.RootEndpoint;
 import org.apache.hadoop.ozone.s3.endpoint.TestBucketAcl;
+import org.apache.hadoop.ozone.s3.endpoint.MultipartUploadInitiateResponse;
+import org.apache.hadoop.ozone.s3.endpoint.CompleteMultipartUploadRequest;
 import org.apache.hadoop.ozone.s3.exception.OS3Exception;
 import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
+
+
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
 
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.Response;
+import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.HashMap;
 
 import static java.net.HttpURLConnection.HTTP_OK;
 import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.apache.hadoop.ozone.s3.util.S3Consts.COPY_SOURCE_HEADER;
+import static org.apache.hadoop.ozone.s3.util.S3Consts.STORAGE_CLASS_HEADER;
+import static org.apache.hadoop.ozone.s3.util.S3Utils.urlEncode;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
+import static org.mockito.Mockito.when;
 
 /**
  * Tests for {@link S3GatewayMetrics}.
@@ -55,6 +60,7 @@ import static org.junit.Assert.fail;
 public class TestS3GatewayMetrics {
 
   private String bucketName = OzoneConsts.BUCKET;
+  private String keyName = OzoneConsts.KEY;
   private OzoneClient clientStub;
   private BucketEndpoint bucketEndpoint;
   private RootEndpoint rootEndpoint;
@@ -62,6 +68,7 @@ public class TestS3GatewayMetrics {
   private OzoneBucket bucket;
   private HttpHeaders headers;
   private static final String ACL_MARKER = "acl";
+  private static final String CONTENT = "0123456789";
   private S3GatewayMetrics metrics;
 
 
@@ -82,11 +89,18 @@ public class TestS3GatewayMetrics {
     keyEndpoint.setOzoneConfiguration(new OzoneConfiguration());
 
     headers = Mockito.mock(HttpHeaders.class);
+    when(headers.getHeaderString(STORAGE_CLASS_HEADER)).thenReturn(
+        "STANDARD");
+    keyEndpoint.setHeaders(headers);
     metrics = bucketEndpoint.getMetrics();
   }
 
+  /**
+   * Bucket Level Endpoints.
+   */
+
   @Test
-  public void testHeadBucket() throws Exception {
+  public void testHeadBucketSuccess() throws Exception {
 
     long oriMetric = metrics.getHeadBucketSuccess();
 
@@ -97,7 +111,7 @@ public class TestS3GatewayMetrics {
   }
 
   @Test
-  public void testListBucket() throws Exception {
+  public void testListBucketSuccess() throws Exception {
 
     long oriMetric = metrics.getListS3BucketsSuccess();
 
@@ -107,23 +121,6 @@ public class TestS3GatewayMetrics {
     assertEquals(1L, curMetric - oriMetric);
   }
 
-  @Test
-  public void testHeadObject() throws Exception {
-    String value = RandomStringUtils.randomAlphanumeric(32);
-    OzoneOutputStream out = bucket.createKey("key1",
-        value.getBytes(UTF_8).length, ReplicationType.RATIS,
-        ReplicationFactor.ONE, new HashMap<>());
-    out.write(value.getBytes(UTF_8));
-    out.close();
-
-    long oriMetric = metrics.getHeadKeySuccess();
-
-    keyEndpoint.head(bucketName, "key1");
-
-    long curMetric = metrics.getHeadKeySuccess();
-    assertEquals(1L, curMetric - oriMetric);
-  }
-
   @Test
   public void testGetBucketSuccess() throws Exception {
     long oriMetric = metrics.getGetBucketSuccess();
@@ -262,7 +259,8 @@ public class TestS3GatewayMetrics {
         .getResourceAsStream("userAccessControlList.xml");
 
     try {
-      bucketEndpoint.put("unknown_bucket", ACL_MARKER, headers, inputBody);
+      bucketEndpoint.put("unknown_bucket", ACL_MARKER, headers,
+          inputBody);
       fail();
     } catch (OS3Exception ex) {
     } finally {
@@ -272,11 +270,326 @@ public class TestS3GatewayMetrics {
     assertEquals(1L, curMetric - oriMetric);
   }
 
+
+  /**
+   * Object Level Endpoints.
+   */
+
+  @Test
+  public void testHeadKeySuccess() throws Exception {
+    bucket.createKey(keyName, 0).close();
+
+    long oriMetric = metrics.getHeadKeySuccess();
+
+    keyEndpoint.head(bucketName, keyName);
+
+    long curMetric = metrics.getHeadKeySuccess();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testHeadKeyFailure() throws Exception {
+    long oriMetric = metrics.getHeadKeyFailure();
+
+    keyEndpoint.head(bucketName, "unknownKey");
+
+    long curMetric = metrics.getHeadKeyFailure();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testCreateKeySuccess() throws Exception {
+
+    long oriMetric = metrics.getCreateKeySuccess();
+    // Create an input stream
+    ByteArrayInputStream body =
+        new ByteArrayInputStream(CONTENT.getBytes(UTF_8));
+    // Create the file
+    keyEndpoint.put(bucketName, keyName, CONTENT
+        .length(), 1, null, body);
+    body.close();
+    long curMetric = metrics.getCreateKeySuccess();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testCreateKeyFailure() throws Exception {
+    long oriMetric = metrics.getCreateKeyFailure();
+
+    // Create the file in a bucket that does not exist
+    try {
+      keyEndpoint.put("unknownBucket", keyName, CONTENT
+          .length(), 1, null, null);
+      fail();
+    } catch (OS3Exception ex) {
+      assertEquals(S3ErrorTable.NO_SUCH_BUCKET.getCode(), ex.getCode());
+    }
+    long curMetric = metrics.getCreateKeyFailure();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+
+  @Test
+  public void testDeleteKeySuccess() throws Exception {
+    long oriMetric = metrics.getDeleteKeySuccess();
+
+    bucket.createKey(keyName, 0).close();
+    keyEndpoint.delete(bucketName, keyName, null);
+    long curMetric = metrics.getDeleteKeySuccess();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testDeleteKeyFailure() throws Exception {
+    long oriMetric = metrics.getDeleteKeyFailure();
+    try {
+      keyEndpoint.delete("unknownBucket", keyName, null);
+      fail();
+    } catch (OS3Exception ex) {
+      assertEquals(S3ErrorTable.NO_SUCH_BUCKET.getCode(), ex.getCode());
+    }
+    long curMetric = metrics.getDeleteKeyFailure();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testGetKeySuccess() throws Exception {
+    long oriMetric = metrics.getGetKeySuccess();
+
+    // Create an input stream
+    ByteArrayInputStream body =
+        new ByteArrayInputStream(CONTENT.getBytes(UTF_8));
+    // Create the file
+    keyEndpoint.put(bucketName, keyName, CONTENT
+        .length(), 1, null, body);
+    // GET the key from the bucket
+    keyEndpoint.get(bucketName, keyName, null, 0,
+        null, body);
+    long curMetric = metrics.getGetKeySuccess();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testGetKeyFailure() throws Exception {
+    long oriMetric = metrics.getGetKeyFailure();
+    // Fetching a non-existent key
+    try {
+      keyEndpoint.get(bucketName, "unknownKey", null, 0,
+          null, null);
+      fail();
+    } catch (OS3Exception ex) {
+      assertEquals(S3ErrorTable.NO_SUCH_KEY.getCode(), ex.getCode());
+    }
+    long curMetric = metrics.getGetKeyFailure();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testInitMultiPartUploadSuccess() throws Exception {
+
+    long oriMetric = metrics.getInitMultiPartUploadSuccess();
+    keyEndpoint.initializeMultipartUpload(bucketName, keyName);
+    long curMetric = metrics.getInitMultiPartUploadSuccess();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testInitMultiPartUploadFailure() throws Exception {
+    long oriMetric = metrics.getInitMultiPartUploadFailure();
+    try {
+      keyEndpoint.initializeMultipartUpload("unknownBucket", keyName);
+      fail();
+    } catch (OS3Exception ex) {
+      assertEquals(S3ErrorTable.NO_SUCH_BUCKET.getCode(), ex.getCode());
+    }
+    long curMetric = metrics.getInitMultiPartUploadFailure();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testAbortMultiPartUploadSuccess() throws Exception {
+
+    // Initiate the Upload and fetch the upload ID
+    String uploadID = initiateMultipartUpload(bucketName, keyName);
+
+    long oriMetric = metrics.getAbortMultiPartUploadSuccess();
+
+    // Abort the Upload Successfully by deleting the key using the Upload-Id
+    keyEndpoint.delete(bucketName, keyName, uploadID);
+
+    long curMetric = metrics.getAbortMultiPartUploadSuccess();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testAbortMultiPartUploadFailure() throws Exception {
+    long oriMetric = metrics.getAbortMultiPartUploadFailure();
+
+    // Fail the Abort Method by providing wrong uploadID
+    try {
+      keyEndpoint.delete(bucketName, keyName, "wrongId");
+      fail();
+    } catch (OS3Exception ex) {
+      assertEquals(S3ErrorTable.NO_SUCH_UPLOAD.getCode(), ex.getCode());
+    }
+    long curMetric = metrics.getAbortMultiPartUploadFailure();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+
+  @Test
+  public void testCompleteMultiPartUploadSuccess() throws Exception {
+
+    // Initiate the Upload and fetch the upload ID
+    String uploadID = initiateMultipartUpload(bucketName, keyName);
+
+    long oriMetric = metrics.getCompleteMultiPartUploadSuccess();
+    // complete multipart upload
+    CompleteMultipartUploadRequest completeMultipartUploadRequest = new
+        CompleteMultipartUploadRequest();
+    Response response = keyEndpoint.completeMultipartUpload(bucketName, keyName,
+        uploadID, completeMultipartUploadRequest);
+    long curMetric = metrics.getCompleteMultiPartUploadSuccess();
+    assertEquals(200, response.getStatus());
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testCompleteMultiPartUploadFailure() throws Exception {
+    long oriMetric = metrics.getCompleteMultiPartUploadFailure();
+    CompleteMultipartUploadRequest completeMultipartUploadRequestNew = new
+        CompleteMultipartUploadRequest();
+    try {
+      keyEndpoint.completeMultipartUpload(bucketName, "key2",
+          "random", completeMultipartUploadRequestNew);
+      fail();
+    } catch (OS3Exception ex) {
+      assertEquals(S3ErrorTable.NO_SUCH_UPLOAD.getCode(), ex.getCode());
+    }
+    long curMetric = metrics.getCompleteMultiPartUploadFailure();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testCreateMultipartKeySuccess() throws Exception {
+
+    // Initiate the Upload and fetch the upload ID
+    String uploadID = initiateMultipartUpload(bucketName, keyName);
+
+    long oriMetric = metrics.getCreateMultipartKeySuccess();
+    ByteArrayInputStream body =
+        new ByteArrayInputStream(CONTENT.getBytes(UTF_8));
+    keyEndpoint.put(bucketName, keyName, CONTENT.length(),
+        1, uploadID, body);
+    long curMetric = metrics.getCreateMultipartKeySuccess();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testCreateMultipartKeyFailure() throws Exception {
+    long oriMetric = metrics.getCreateMultipartKeyFailure();
+    try {
+      keyEndpoint.put(bucketName, keyName, CONTENT.length(),
+          1, "randomId", null);
+    } catch (OS3Exception ex) {
+      assertEquals(S3ErrorTable.NO_SUCH_UPLOAD.getCode(), ex.getCode());
+    }
+    long curMetric = metrics.getCreateMultipartKeyFailure();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testListPartsSuccess() throws Exception {
+
+    long oriMetric = metrics.getListPartsSuccess();
+    // Initiate the Upload and fetch the upload ID
+    String uploadID = initiateMultipartUpload(bucketName, keyName);
+
+    // Listing out the parts by providing the uploadID
+    keyEndpoint.get(bucketName, keyName,
+        uploadID, 3, null, null);
+    long curMetric = metrics.getListPartsSuccess();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testListPartsFailure() throws Exception {
+
+    long oriMetric = metrics.getListPartsFailure();
+    try {
+      // Listing out the parts by providing the uploadID after aborting
+      keyEndpoint.get(bucketName, keyName,
+          "wrong_id", 3, null, null);
+      fail();
+    } catch (OS3Exception ex) {
+      assertEquals(S3ErrorTable.NO_SUCH_UPLOAD.getCode(), ex.getCode());
+    }
+    long curMetric = metrics.getListPartsFailure();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
+  @Test
+  public void testCopyObject() throws Exception {
+
+    String destBucket = "b2";
+    String destKey = "key2";
+
+    // Create bucket
+    clientStub.getObjectStore().createS3Bucket(destBucket);
+
+
+    // Test for Success of CopyObjectSuccess Metric
+    long oriMetric = metrics.getCopyObjectSuccess();
+    ByteArrayInputStream body =
+        new ByteArrayInputStream(CONTENT.getBytes(UTF_8));
+
+    keyEndpoint.put(bucketName, keyName,
+        CONTENT.length(), 1, null, body);
+
+    // Add copy header, and then call put
+    when(headers.getHeaderString(COPY_SOURCE_HEADER)).thenReturn(
+        bucketName + "/" + urlEncode(keyName));
+
+    keyEndpoint.put(destBucket, destKey, CONTENT.length(), 1,
+        null, body);
+    long curMetric = metrics.getCopyObjectSuccess();
+    assertEquals(1L, curMetric - oriMetric);
+
+    // Test for Failure of CopyObjectFailure Metric
+    oriMetric = metrics.getCopyObjectFailure();
+    // source and dest same
+    try {
+      when(headers.getHeaderString(STORAGE_CLASS_HEADER)).thenReturn("");
+      keyEndpoint.put(bucketName, keyName, CONTENT.length(), 1, null, body);
+      fail("Test for CopyObjectMetric failed");
+    } catch (OS3Exception ex) {
+      Assert.assertTrue(ex.getErrorMessage().contains("This copy request is " +
+          "illegal"));
+    }
+    curMetric = metrics.getCopyObjectFailure();
+    assertEquals(1L, curMetric - oriMetric);
+  }
+
   private OzoneClient createClientWithKeys(String... keys) throws IOException {
-    OzoneBucket bkt = clientStub.getObjectStore().getS3Bucket(bucketName);
     for (String key : keys) {
-      bkt.createKey(key, 0).close();
+      bucket.createKey(key, 0).close();
     }
     return clientStub;
   }
+
+  private String initiateMultipartUpload(String bktName, String key)
+      throws IOException,
+      OS3Exception {
+    // Initiate the Upload
+    Response response =
+        keyEndpoint.initializeMultipartUpload(bktName, key);
+    MultipartUploadInitiateResponse multipartUploadInitiateResponse =
+        (MultipartUploadInitiateResponse) response.getEntity();
+    if (response.getStatus() == 200) {
+      // Fetch the Upload-Id
+      String uploadID = multipartUploadInitiateResponse.getUploadID();
+      return uploadID;
+    }
+    return "Invalid-Id";
+  }
 }
\ No newline at end of file


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@ozone.apache.org
For additional commands, e-mail: commits-help@ozone.apache.org