Posted to common-issues@hadoop.apache.org by GitBox <gi...@apache.org> on 2021/08/27 07:16:40 UTC

[GitHub] [hadoop] mehakmeet commented on a change in pull request #3312: HADOOP-17851 Support user specified content encoding for S3A

mehakmeet commented on a change in pull request #3312:
URL: https://github.com/apache/hadoop/pull/3312#discussion_r697168862



##########
File path: hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AContentEncoding.java
##########
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import org.assertj.core.api.Assertions;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.apache.hadoop.fs.s3a.impl.HeaderProcessing.XA_CONTENT_ENCODING;

Review comment:
       This would be a static import.
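   i.e. something like:

```java
import static org.apache.hadoop.fs.s3a.impl.HeaderProcessing.XA_CONTENT_ENCODING;
```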

##########
File path: hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSUserDefinedKey.java
##########
@@ -39,8 +39,9 @@ protected Configuration createConfiguration() {
     // get the KMS key for this test.
     Configuration c = new Configuration();
     String kmsKey = c.get(SERVER_SIDE_ENCRYPTION_KEY);
-    if (StringUtils.isBlank(kmsKey) || !c.get(SERVER_SIDE_ENCRYPTION_ALGORITHM)
-        .equals(S3AEncryptionMethods.CSE_KMS.name())) {
+    String encryptionAlgorithm = c.get(SERVER_SIDE_ENCRYPTION_ALGORITHM);
+    if (kmsKey == null || StringUtils.isBlank(kmsKey) || encryptionAlgorithm == null ||
+        !encryptionAlgorithm.equals(S3AEncryptionMethods.CSE_KMS.name())) {

Review comment:
       This is quite interesting.
   `kmsKey` doesn't need a separate null check, since `StringUtils.isBlank(kmsKey)` already covers that; but `encryptionAlgorithm` does. A simpler fix would be to replace `c.get(SERVER_SIDE_ENCRYPTION_ALGORITHM)` in the original condition with `c.get(SERVER_SIDE_ENCRYPTION_ALGORITHM, "")`, so the default is "" rather than null. Good catch!
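   A rough sketch of that simplification (the body of the `if` left as it is in the patch):

```java
// "" as the default means no extra null check is needed for the algorithm.
String kmsKey = c.get(SERVER_SIDE_ENCRYPTION_KEY);
if (StringUtils.isBlank(kmsKey)
    || !c.get(SERVER_SIDE_ENCRYPTION_ALGORITHM, "")
        .equals(S3AEncryptionMethods.CSE_KMS.name())) {
  // ... body unchanged from the patch
}
```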

##########
File path: hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/RequestFactoryImpl.java
##########
@@ -586,6 +604,9 @@ public static RequestFactoryBuilder builder() {
     /** Requester Pays flag. */
     private boolean requesterPays = false;
 
+    /** Content Encoding. */
+    private String contentEncoding = null;

Review comment:
       No need to initialize this to null; that's the default for an object field anyway.
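   i.e. simply:

```java
/** Content Encoding. */
private String contentEncoding;
```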

##########
File path: hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
##########
@@ -925,12 +925,16 @@ protected RequestFactory createRequestFactory() {
     // request factory.
     initCannedAcls(getConf());
 
+    // Any encoding type
+    String contentEncoding = getConf().get(CONTENT_ENCODING, DEFAULT_CONTENT_ENCODING);

Review comment:
       Maybe use getTrimmed() here, so stray whitespace in the configured value doesn't end up in the request.
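   e.g.:

```java
// getTrimmed() strips surrounding whitespace from the configured value.
String contentEncoding = getConf().getTrimmed(CONTENT_ENCODING, DEFAULT_CONTENT_ENCODING);
```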

##########
File path: hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
##########
@@ -410,6 +410,10 @@ private Constants() {
   public static final String CANNED_ACL = "fs.s3a.acl.default";
   public static final String DEFAULT_CANNED_ACL = "";
 
+  // gzip, deflate, compress, br, etc.
+  public static final String CONTENT_ENCODING = "fs.s3a.content.encoding";
+  public static final String DEFAULT_CONTENT_ENCODING = null;

Review comment:
       If the default is "null", do we even need to make a constant for that? If nothing is set the get() from the config would make the variable null anyways. It's good that we are mentioning that in the comments tho.
   Also, what are the more options in `etc.`? 
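   i.e. with a null default the two forms behave the same:

```java
String a = getConf().get(CONTENT_ENCODING);                            // null if unset
String b = getConf().get(CONTENT_ENCODING, DEFAULT_CONTENT_ENCODING);  // also null if unset
```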

##########
File path: hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AContentEncoding.java
##########
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import org.assertj.core.api.Assertions;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.apache.hadoop.fs.s3a.impl.HeaderProcessing.XA_CONTENT_ENCODING;
+import org.apache.hadoop.fs.s3a.impl.StoreContext;
+
+import static org.apache.hadoop.fs.s3a.Constants.CONTENT_ENCODING;
+
+/**
+ * Tests of content encoding object meta data.
+ */
+public class ITestS3AContentEncoding extends AbstractS3ATestBase {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ITestS3ACannedACLs.class);
+
+  @Override
+  protected Configuration createConfiguration() {
+    Configuration conf = super.createConfiguration();
+    conf.set(CONTENT_ENCODING, "gzip");
+
+    return conf;
+  }
+
+  @Test
+  public void testCreatedObjectsHaveEncoding() throws Throwable {
+    S3AFileSystem fs = getFileSystem();
+    Path dir = methodPath();
+    fs.mkdirs(dir);
+    Path path = new Path(dir, "1");
+    ContractTestUtils.touch(fs, path);
+    assertObjectHasEncoding(path);
+    Path path2 = new Path(dir, "2");
+    fs.rename(path, path2);
+    assertObjectHasEncoding(path);
+  }
+
+  /**
+   * Assert that a given object has gzip encoding specified.
+   * @param path path
+   */
+  private void assertObjectHasEncoding(Path path) {
+    S3AFileSystem fs = getFileSystem();
+
+    StoreContext storeContext = fs.createStoreContext();
+    String key = storeContext.pathToKey(path);
+    String encoding = fs.getXAttrs(XA_CONTENT_ENCODING);

Review comment:
       Not sure how this test would work as written. The XAttr lookup is per path: `getXAttrs(path)` returns a `Map<String, byte[]>`, so we'd then need to pull `XA_CONTENT_ENCODING`'s value out of that map and decode the `byte[]` to recover the actual string. I recently ran into this too and found it a little confusing, to say the least; see `ITestS3AClientSideEncryptionKms#assertEncrypted(path)`, where I used file attributes in a test, for reference.
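   A rough sketch of what the assertion could look like (decoding the value by hand as UTF-8 here, purely for illustration; the helper would also need `throws Exception`, plus `java.util.Map` and `java.nio.charset.StandardCharsets` imports):

```java
Map<String, byte[]> xAttrs = fs.getXAttrs(path);
byte[] encodingBytes = xAttrs.get(XA_CONTENT_ENCODING);
Assertions.assertThat(encodingBytes)
    .describedAs("Content-Encoding xattr of %s", path)
    .isNotNull();
String encoding = new String(encodingBytes, StandardCharsets.UTF_8);
Assertions.assertThat(encoding)
    .describedAs("Mismatch in the encoding of object %s", path)
    .isEqualTo("gzip");
```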

##########
File path: hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AContentEncoding.java
##########
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import org.assertj.core.api.Assertions;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.apache.hadoop.fs.s3a.impl.HeaderProcessing.XA_CONTENT_ENCODING;
+import org.apache.hadoop.fs.s3a.impl.StoreContext;
+
+import static org.apache.hadoop.fs.s3a.Constants.CONTENT_ENCODING;
+
+/**
+ * Tests of content encoding object meta data.
+ */
+public class ITestS3AContentEncoding extends AbstractS3ATestBase {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ITestS3ACannedACLs.class);

Review comment:
       The logger is never used, and it points at the wrong class (`ITestS3ACannedACLs` rather than this test).
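   If it is kept at all, it should reference this class:

```java
private static final Logger LOG =
    LoggerFactory.getLogger(ITestS3AContentEncoding.class);
```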

##########
File path: hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesEncryption.java
##########
@@ -45,8 +45,9 @@
   public void setup() throws Exception {
     Configuration c = new Configuration();
     String kmsKey = c.get(SERVER_SIDE_ENCRYPTION_KEY);
-    if (StringUtils.isBlank(kmsKey) || !c.get(SERVER_SIDE_ENCRYPTION_ALGORITHM)
-        .equals(S3AEncryptionMethods.CSE_KMS.name())) {
+    String encryptionAlgorithm = c.get(SERVER_SIDE_ENCRYPTION_ALGORITHM);
+    if (kmsKey == null || StringUtils.isBlank(kmsKey) || encryptionAlgorithm == null ||

Review comment:
       Same as stated above.

##########
File path: hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AContentEncoding.java
##########
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import org.assertj.core.api.Assertions;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.apache.hadoop.fs.s3a.impl.HeaderProcessing.XA_CONTENT_ENCODING;
+import org.apache.hadoop.fs.s3a.impl.StoreContext;
+
+import static org.apache.hadoop.fs.s3a.Constants.CONTENT_ENCODING;
+
+/**
+ * Tests of content encoding object meta data.
+ */
+public class ITestS3AContentEncoding extends AbstractS3ATestBase {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ITestS3ACannedACLs.class);
+
+  @Override
+  protected Configuration createConfiguration() {
+    Configuration conf = super.createConfiguration();
+    conf.set(CONTENT_ENCODING, "gzip");
+
+    return conf;
+  }
+
+  @Test
+  public void testCreatedObjectsHaveEncoding() throws Throwable {
+    S3AFileSystem fs = getFileSystem();
+    Path dir = methodPath();
+    fs.mkdirs(dir);
+    Path path = new Path(dir, "1");
+    ContractTestUtils.touch(fs, path);
+    assertObjectHasEncoding(path);
+    Path path2 = new Path(dir, "2");
+    fs.rename(path, path2);
+    assertObjectHasEncoding(path);
+  }
+
+  /**
+   * Assert that a given object has gzip encoding specified.
+   * @param path path
+   */
+  private void assertObjectHasEncoding(Path path) {
+    S3AFileSystem fs = getFileSystem();
+
+    StoreContext storeContext = fs.createStoreContext();
+    String key = storeContext.pathToKey(path);
+    String encoding = fs.getXAttrs(XA_CONTENT_ENCODING);
+    Assertions.assertThat(encoding)
+        .describedAs("Encoding of object %s is gzip", path)

Review comment:
       The description here is what the error message will say when the assertion fails, so it should describe the expectation rather than state it as a fact. Maybe something like "Mismatch in the encoding of object %s"?

##########
File path: hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
##########
@@ -925,12 +925,16 @@ protected RequestFactory createRequestFactory() {
     // request factory.
     initCannedAcls(getConf());
 
+    // Any encoding type
+    String contentEncoding = getConf().get(CONTENT_ENCODING, DEFAULT_CONTENT_ENCODING);

Review comment:
       What would happen if I pass an arbitrary value via this property? Should there be a check of some kind that verifies the value is valid?
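   (For instance, a minimal, purely illustrative guard could treat a blank value as unset rather than sending an empty Content-Encoding header; full validation against the set of legal encodings is probably overkill.)

```java
// Illustrative only, not part of the patch.
String contentEncoding = getConf().getTrimmed(CONTENT_ENCODING, DEFAULT_CONTENT_ENCODING);
if (contentEncoding != null && contentEncoding.isEmpty()) {
  contentEncoding = null;   // blank config value: behave as if unset
}
```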

##########
File path: hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AContentEncoding.java
##########
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import org.assertj.core.api.Assertions;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.apache.hadoop.fs.s3a.impl.HeaderProcessing.XA_CONTENT_ENCODING;
+import org.apache.hadoop.fs.s3a.impl.StoreContext;
+
+import static org.apache.hadoop.fs.s3a.Constants.CONTENT_ENCODING;
+
+/**
+ * Tests of content encoding object meta data.
+ */
+public class ITestS3AContentEncoding extends AbstractS3ATestBase {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ITestS3ACannedACLs.class);
+
+  @Override
+  protected Configuration createConfiguration() {
+    Configuration conf = super.createConfiguration();
+    conf.set(CONTENT_ENCODING, "gzip");
+
+    return conf;
+  }
+
+  @Test
+  public void testCreatedObjectsHaveEncoding() throws Throwable {
+    S3AFileSystem fs = getFileSystem();
+    Path dir = methodPath();
+    fs.mkdirs(dir);
+    Path path = new Path(dir, "1");
+    ContractTestUtils.touch(fs, path);
+    assertObjectHasEncoding(path);
+    Path path2 = new Path(dir, "2");
+    fs.rename(path, path2);
+    assertObjectHasEncoding(path);

Review comment:
       Did you mean to assert here for path2?
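   i.e. presumably the last lines were meant to be:

```java
Path path2 = new Path(dir, "2");
fs.rename(path, path2);
assertObjectHasEncoding(path2);   // check the rename target, not the old path
```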




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: common-issues-unsubscribe@hadoop.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: common-issues-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-issues-help@hadoop.apache.org