You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ozone.apache.org by ad...@apache.org on 2020/12/10 12:08:03 UTC

[ozone] branch master updated: HDDS-4559. Avoid using hard-coded UTF-8 charset (#1673)

This is an automated email from the ASF dual-hosted git repository.

adoroszlai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new 7eefe24  HDDS-4559. Avoid using hard-coded UTF-8 charset (#1673)
7eefe24 is described below

commit 7eefe24a136a2b1f8a32b6a063581ad5047f0aa2
Author: lamber-ken <la...@163.com>
AuthorDate: Thu Dec 10 20:07:45 2020 +0800

    HDDS-4559. Avoid using hard-coded UTF-8 charset (#1673)
---
 .../java/org/apache/hadoop/ozone/OzoneConsts.java  |  4 +++-
 .../hadoop/hdds/conf/ConfigFileAppender.java       |  4 +++-
 .../container/common/helpers/DatanodeIdYaml.java   |  3 ++-
 .../container/keyvalue/TestTarContainerPacker.java | 11 +++++-----
 .../hadoop/hdds/server/http/HtmlQuoting.java       |  2 +-
 .../hadoop/hdds/server/http/HttpServer2.java       |  3 ++-
 .../client/TestDefaultCertificateClient.java       | 24 ++++++++++++----------
 .../hdds/scm/metadata/X509CertificateCodec.java    |  6 +++---
 .../apache/hadoop/ozone/web/utils/OzoneUtils.java  |  4 ++--
 .../org/apache/hadoop/ozone/csi/NodeService.java   |  5 +++--
 .../client/rpc/TestOzoneAtRestEncryption.java      | 20 ++++++++++--------
 .../org/apache/hadoop/ozone/om/OzoneManager.java   |  3 ++-
 .../hadoop/ozone/security/AWSV4AuthValidator.java  |  9 ++++----
 .../hadoop/ozone/s3/AWSSignatureProcessor.java     |  6 ++++--
 .../hadoop/ozone/s3/OzoneClientProducer.java       |  2 +-
 .../apache/hadoop/ozone/s3/SignatureProcessor.java |  2 --
 .../ozone/s3/TestSignedChunksInputStream.java      | 12 +++++------
 .../ozone/genesis/BenchMarkMetadataStoreReads.java |  8 ++++----
 .../genesis/BenchMarkMetadataStoreWrites.java      |  6 +++---
 .../ozone/genesis/BenchMarkRocksDbStore.java       |  8 ++++----
 .../org/apache/hadoop/ozone/scm/cli/SQLCLI.java    |  2 --
 21 files changed, 77 insertions(+), 67 deletions(-)

diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
index a7aca16..07e2815 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
@@ -23,6 +23,8 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.ratis.thirdparty.io.grpc.Context;
 import org.apache.ratis.thirdparty.io.grpc.Metadata;
 
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.regex.Pattern;
 
 import static org.apache.ratis.thirdparty.io.grpc.Metadata.ASCII_STRING_MARSHALLER;
@@ -351,7 +353,7 @@ public final class OzoneConsts {
   public static final String GDPR_FLAG = "gdprEnabled";
   public static final String GDPR_ALGORITHM_NAME = "AES";
   public static final int GDPR_DEFAULT_RANDOM_SECRET_LENGTH = 16;
-  public static final String GDPR_CHARSET = "UTF-8";
+  public static final Charset GDPR_CHARSET = StandardCharsets.UTF_8;
   public static final String GDPR_LENGTH = "length";
   public static final String GDPR_SECRET = "secret";
   public static final String GDPR_ALGORITHM = "algorithm";
diff --git a/hadoop-hdds/config/src/main/java/org/apache/hadoop/hdds/conf/ConfigFileAppender.java b/hadoop-hdds/config/src/main/java/org/apache/hadoop/hdds/conf/ConfigFileAppender.java
index 9f1c087..4256ac8 100644
--- a/hadoop-hdds/config/src/main/java/org/apache/hadoop/hdds/conf/ConfigFileAppender.java
+++ b/hadoop-hdds/config/src/main/java/org/apache/hadoop/hdds/conf/ConfigFileAppender.java
@@ -28,6 +28,7 @@ import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamResult;
 import java.io.InputStream;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.stream.Collectors;
 
@@ -117,7 +118,8 @@ public class ConfigFileAppender {
       factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
       Transformer transformer = factory.newTransformer();
 
-      transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
+      transformer.setOutputProperty(OutputKeys.ENCODING,
+              StandardCharsets.UTF_8.name());
       transformer.setOutputProperty(OutputKeys.INDENT, "yes");
       transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount",
           "2");
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DatanodeIdYaml.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DatanodeIdYaml.java
index 2d4ece2..3b14641 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DatanodeIdYaml.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DatanodeIdYaml.java
@@ -23,6 +23,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.UUID;
@@ -58,7 +59,7 @@ public final class DatanodeIdYaml {
     Yaml yaml = new Yaml(options);
 
     try (Writer writer = new OutputStreamWriter(
-        new FileOutputStream(path), "UTF-8")) {
+        new FileOutputStream(path), StandardCharsets.UTF_8)) {
       yaml.dump(getDatanodeDetailsYaml(datanodeDetails), writer);
     }
   }
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
index bee77c7..d248ac1 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
@@ -23,7 +23,7 @@ import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.FileWriter;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -55,7 +55,6 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
-import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.apache.commons.compress.compressors.CompressorStreamFactory.GZIP;
 
 /**
@@ -187,7 +186,7 @@ public class TestTarContainerPacker {
     //read the container descriptor only
     try (FileInputStream input = new FileInputStream(targetFile.toFile())) {
       String containerYaml = new String(packer.unpackContainerDescriptor(input),
-          Charset.forName(UTF_8.name()));
+          StandardCharsets.UTF_8);
       Assert.assertEquals(TEST_DESCRIPTOR_FILE_CONTENT, containerYaml);
     }
 
@@ -203,7 +202,7 @@ public class TestTarContainerPacker {
     try (FileInputStream input = new FileInputStream(targetFile.toFile())) {
       descriptor =
           new String(packer.unpackContainerData(destinationContainer, input),
-              Charset.forName(UTF_8.name()));
+              StandardCharsets.UTF_8);
     }
 
     assertExampleMetadataDbIsGood(
@@ -359,7 +358,7 @@ public class TestTarContainerPacker {
 
     try (FileInputStream testFile = new FileInputStream(dbFile.toFile())) {
       List<String> strings = IOUtils
-          .readLines(testFile, Charset.forName(UTF_8.name()));
+          .readLines(testFile, StandardCharsets.UTF_8);
       Assert.assertEquals(1, strings.size());
       Assert.assertEquals(TEST_DB_FILE_CONTENT, strings.get(0));
     }
@@ -377,7 +376,7 @@ public class TestTarContainerPacker {
 
     try (FileInputStream testFile = new FileInputStream(chunkFile.toFile())) {
       List<String> strings = IOUtils
-          .readLines(testFile, Charset.forName(UTF_8.name()));
+          .readLines(testFile, StandardCharsets.UTF_8);
       Assert.assertEquals(1, strings.size());
       Assert.assertEquals(TEST_CHUNK_FILE_CONTENT, strings.get(0));
     }
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HtmlQuoting.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HtmlQuoting.java
index f4262f9..44a1d00 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HtmlQuoting.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HtmlQuoting.java
@@ -127,7 +127,7 @@ public final class HtmlQuoting {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
       try {
         quoteHtmlChars(buffer, bytes, 0, bytes.length);
-        return buffer.toString("UTF-8");
+        return buffer.toString(StandardCharsets.UTF_8.name());
       } catch (IOException ioe) {
         // Won't happen, since it is a bytearrayoutputstream
         return null;
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
index 9282c84..9aad94a 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
@@ -38,6 +38,7 @@ import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Enumeration;
@@ -1522,7 +1523,7 @@ public final class HttpServer2 implements FilterContainer {
       }
       response.setContentType("text/plain; charset=UTF-8");
       try (PrintStream out = new PrintStream(
-          response.getOutputStream(), false, "UTF-8")) {
+          response.getOutputStream(), false, StandardCharsets.UTF_8.name())) {
         ReflectionUtils.printThreadInfo(out, "");
       }
       ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/client/TestDefaultCertificateClient.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/client/TestDefaultCertificateClient.java
index f389cdb..d841359 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/client/TestDefaultCertificateClient.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/client/TestDefaultCertificateClient.java
@@ -28,6 +28,7 @@ import org.junit.Before;
 import org.junit.Test;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -75,7 +76,6 @@ public class TestDefaultCertificateClient {
   private Path dnMetaDirPath;
   private SecurityConfig omSecurityConfig;
   private SecurityConfig dnSecurityConfig;
-  private final static String UTF = "UTF-8";
   private final static String DN_COMPONENT = DNCertificateClient.COMPONENT_NAME;
   private final static String OM_COMPONENT = OMCertificateClient.COMPONENT_NAME;
   private KeyCodec omKeyCodec;
@@ -201,7 +201,7 @@ public class TestDefaultCertificateClient {
 
   @Test
   public void testSignDataStream() throws Exception {
-    String data = RandomStringUtils.random(100, UTF);
+    String data = RandomStringUtils.random(100, StandardCharsets.UTF_8.name());
     FileUtils.deleteQuietly(Paths.get(
         omSecurityConfig.getKeyLocation(OM_COMPONENT).toString(),
         omSecurityConfig.getPrivateKeyFileName()).toFile());
@@ -213,11 +213,11 @@ public class TestDefaultCertificateClient {
     LambdaTestUtils.intercept(IOException.class, "Error while " +
             "signing the stream",
         () -> omCertClient.signDataStream(IOUtils.toInputStream(data,
-            UTF)));
+            StandardCharsets.UTF_8)));
 
     generateKeyPairFiles();
     byte[] sign = omCertClient.signDataStream(IOUtils.toInputStream(data,
-        UTF));
+        StandardCharsets.UTF_8));
     validateHash(sign, data.getBytes());
   }
 
@@ -239,21 +239,22 @@ public class TestDefaultCertificateClient {
    */
   @Test
   public void verifySignatureStream() throws Exception {
-    String data = RandomStringUtils.random(500, UTF);
+    String data = RandomStringUtils.random(500, StandardCharsets.UTF_8.name());
     byte[] sign = omCertClient.signDataStream(IOUtils.toInputStream(data,
-        UTF));
+        StandardCharsets.UTF_8));
 
     // Positive tests.
     assertTrue(omCertClient.verifySignature(data.getBytes(), sign,
         x509Certificate));
-    assertTrue(omCertClient.verifySignature(IOUtils.toInputStream(data, UTF),
+    assertTrue(omCertClient.verifySignature(
+        IOUtils.toInputStream(data, StandardCharsets.UTF_8),
         sign, x509Certificate));
 
     // Negative tests.
     assertFalse(omCertClient.verifySignature(data.getBytes(),
         "abc".getBytes(), x509Certificate));
     assertFalse(omCertClient.verifySignature(IOUtils.toInputStream(data,
-        UTF), "abc".getBytes(), x509Certificate));
+        StandardCharsets.UTF_8), "abc".getBytes(), x509Certificate));
 
   }
 
@@ -262,20 +263,21 @@ public class TestDefaultCertificateClient {
    */
   @Test
   public void verifySignatureDataArray() throws Exception {
-    String data = RandomStringUtils.random(500, UTF);
+    String data = RandomStringUtils.random(500, StandardCharsets.UTF_8.name());
     byte[] sign = omCertClient.signData(data.getBytes());
 
     // Positive tests.
     assertTrue(omCertClient.verifySignature(data.getBytes(), sign,
         x509Certificate));
-    assertTrue(omCertClient.verifySignature(IOUtils.toInputStream(data, UTF),
+    assertTrue(omCertClient.verifySignature(
+        IOUtils.toInputStream(data, StandardCharsets.UTF_8),
         sign, x509Certificate));
 
     // Negative tests.
     assertFalse(omCertClient.verifySignature(data.getBytes(),
         "abc".getBytes(), x509Certificate));
     assertFalse(omCertClient.verifySignature(IOUtils.toInputStream(data,
-        UTF), "abc".getBytes(), x509Certificate));
+        StandardCharsets.UTF_8), "abc".getBytes(), x509Certificate));
 
   }
 
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
index 8c30a43..9bfa7d6 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
@@ -20,7 +20,7 @@
 package org.apache.hadoop.hdds.scm.metadata;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
@@ -35,7 +35,7 @@ public class X509CertificateCodec implements Codec<X509Certificate> {
   public byte[] toPersistedFormat(X509Certificate object) throws IOException {
     try {
       return CertificateCodec.getPEMEncodedString(object)
-          .getBytes(Charset.forName("UTF-8"));
+          .getBytes(StandardCharsets.UTF_8);
     } catch (SCMSecurityException exp) {
       throw new IOException(exp);
     }
@@ -45,7 +45,7 @@ public class X509CertificateCodec implements Codec<X509Certificate> {
   public X509Certificate fromPersistedFormat(byte[] rawData)
       throws IOException {
     try{
-      String s = new String(rawData, Charset.forName("UTF-8"));
+      String s = new String(rawData, StandardCharsets.UTF_8);
       return CertificateCodec.getX509Certificate(s);
     } catch (CertificateException exp) {
       throw new IOException(exp);
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneUtils.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneUtils.java
index 1cdea8b..9bd8398 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneUtils.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneUtils.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.ozone.web.utils;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Locale;
@@ -42,8 +43,7 @@ import org.apache.ratis.util.TimeDuration;
 @InterfaceAudience.Private
 public final class OzoneUtils {
 
-  public static final String ENCODING_NAME = "UTF-8";
-  public static final Charset ENCODING = Charset.forName(ENCODING_NAME);
+  public static final Charset ENCODING = StandardCharsets.UTF_8;
 
   private OzoneUtils() {
     // Never constructed
diff --git a/hadoop-ozone/csi/src/main/java/org/apache/hadoop/ozone/csi/NodeService.java b/hadoop-ozone/csi/src/main/java/org/apache/hadoop/ozone/csi/NodeService.java
index 45784a4..0665a79 100644
--- a/hadoop-ozone/csi/src/main/java/org/apache/hadoop/ozone/csi/NodeService.java
+++ b/hadoop-ozone/csi/src/main/java/org/apache/hadoop/ozone/csi/NodeService.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.ozone.csi;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.concurrent.TimeUnit;
@@ -86,8 +87,8 @@ public class NodeService extends NodeImplBase {
     exec.waitFor(10, TimeUnit.SECONDS);
 
     LOG.info("Command is executed with  stdout: {}, stderr: {}",
-        IOUtils.toString(exec.getInputStream(), "UTF-8"),
-        IOUtils.toString(exec.getErrorStream(), "UTF-8"));
+        IOUtils.toString(exec.getInputStream(), StandardCharsets.UTF_8),
+        IOUtils.toString(exec.getErrorStream(), StandardCharsets.UTF_8));
     if (exec.exitValue() != 0) {
       throw new RuntimeException(String
           .format("Return code of the command %s was %d", command,
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneAtRestEncryption.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneAtRestEncryption.java
index 324db98..7aced89 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneAtRestEncryption.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneAtRestEncryption.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.ozone.client.rpc;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.security.NoSuchAlgorithmException;
 import java.time.Instant;
 import java.util.HashMap;
@@ -177,9 +178,10 @@ public class TestOzoneAtRestEncryption extends TestOzoneRpcClient {
       String keyName = UUID.randomUUID().toString();
 
       try (OzoneOutputStream out = bucket.createKey(keyName,
-          value.getBytes("UTF-8").length, ReplicationType.STAND_ALONE,
+          value.getBytes(StandardCharsets.UTF_8).length,
+          ReplicationType.STAND_ALONE,
           ReplicationFactor.ONE, new HashMap<>())) {
-        out.write(value.getBytes("UTF-8"));
+        out.write(value.getBytes(StandardCharsets.UTF_8));
       }
 
       OzoneKey key = bucket.getKey(keyName);
@@ -188,7 +190,7 @@ public class TestOzoneAtRestEncryption extends TestOzoneRpcClient {
       int len = 0;
 
       try(OzoneInputStream is = bucket.readKey(keyName)) {
-        fileContent = new byte[value.getBytes("UTF-8").length];
+        fileContent = new byte[value.getBytes(StandardCharsets.UTF_8).length];
         len = is.read(fileContent);
       }
 
@@ -196,7 +198,8 @@ public class TestOzoneAtRestEncryption extends TestOzoneRpcClient {
       Assert.assertTrue(verifyRatisReplication(volumeName, bucketName,
           keyName, ReplicationType.STAND_ALONE,
           ReplicationFactor.ONE));
-      Assert.assertEquals(value, new String(fileContent, "UTF-8"));
+      Assert.assertEquals(value, new String(fileContent,
+          StandardCharsets.UTF_8));
       Assert.assertFalse(key.getCreationTime().isBefore(testStartTime));
       Assert.assertFalse(key.getModificationTime().isBefore(testStartTime));
     }
@@ -235,9 +238,10 @@ public class TestOzoneAtRestEncryption extends TestOzoneRpcClient {
     Map<String, String> keyMetadata = new HashMap<>();
     keyMetadata.put(OzoneConsts.GDPR_FLAG, "true");
     try (OzoneOutputStream out = bucket.createKey(keyName,
-        value.getBytes("UTF-8").length, ReplicationType.STAND_ALONE,
+        value.getBytes(StandardCharsets.UTF_8).length,
+        ReplicationType.STAND_ALONE,
         ReplicationFactor.ONE, keyMetadata)) {
-      out.write(value.getBytes("UTF-8"));
+      out.write(value.getBytes(StandardCharsets.UTF_8));
     }
 
     OzoneKeyDetails key = bucket.getKey(keyName);
@@ -246,7 +250,7 @@ public class TestOzoneAtRestEncryption extends TestOzoneRpcClient {
     int len = 0;
 
     try(OzoneInputStream is = bucket.readKey(keyName)) {
-      fileContent = new byte[value.getBytes("UTF-8").length];
+      fileContent = new byte[value.getBytes(StandardCharsets.UTF_8).length];
       len = is.read(fileContent);
     }
 
@@ -254,7 +258,7 @@ public class TestOzoneAtRestEncryption extends TestOzoneRpcClient {
     Assert.assertTrue(verifyRatisReplication(volumeName, bucketName,
         keyName, ReplicationType.STAND_ALONE,
         ReplicationFactor.ONE));
-    Assert.assertEquals(value, new String(fileContent, "UTF-8"));
+    Assert.assertEquals(value, new String(fileContent, StandardCharsets.UTF_8));
     Assert.assertFalse(key.getCreationTime().isBefore(testStartTime));
     Assert.assertFalse(key.getModificationTime().isBefore(testStartTime));
     Assert.assertEquals("true", key.getMetadata().get(OzoneConsts.GDPR_FLAG));
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java
index b6d0876..b5191ce 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java
@@ -25,6 +25,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
@@ -640,7 +641,7 @@ public final class OzoneManager extends ServiceRuntimeInfoImpl
           getTempMetricsStorageFile().getParentFile().toPath());
       try (BufferedWriter writer = new BufferedWriter(
           new OutputStreamWriter(new FileOutputStream(
-              getTempMetricsStorageFile()), "UTF-8"))) {
+              getTempMetricsStorageFile()), StandardCharsets.UTF_8))) {
         OmMetricsInfo metricsInfo = new OmMetricsInfo();
         metricsInfo.setNumKeys(metrics.getNumKeys());
         WRITER.writeValue(writer, metricsInfo);
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/security/AWSV4AuthValidator.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/security/AWSV4AuthValidator.java
index 575c9ea..0a0e947 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/security/AWSV4AuthValidator.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/security/AWSV4AuthValidator.java
@@ -26,7 +26,6 @@ import javax.crypto.Mac;
 import javax.crypto.spec.SecretKeySpec;
 import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
-import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.security.GeneralSecurityException;
 import java.security.MessageDigest;
@@ -42,14 +41,13 @@ final class AWSV4AuthValidator {
   private final static Logger LOG =
       LoggerFactory.getLogger(AWSV4AuthValidator.class);
   private static final String HMAC_SHA256_ALGORITHM = "HmacSHA256";
-  private static final Charset UTF_8 = Charset.forName("utf-8");
 
   private AWSV4AuthValidator() {
   }
 
   private static String urlDecode(String str) {
     try {
-      return URLDecoder.decode(str, UTF_8.name());
+      return URLDecoder.decode(str, StandardCharsets.UTF_8.name());
     } catch (UnsupportedEncodingException e) {
       throw new RuntimeException(e);
     }
@@ -57,7 +55,7 @@ final class AWSV4AuthValidator {
 
   public static String hash(String payload) throws NoSuchAlgorithmException {
     MessageDigest md = MessageDigest.getInstance("SHA-256");
-    md.update(payload.getBytes(UTF_8));
+    md.update(payload.getBytes(StandardCharsets.UTF_8));
     return String.format("%064x", new java.math.BigInteger(1, md.digest()));
   }
 
@@ -91,7 +89,8 @@ final class AWSV4AuthValidator {
     String dateStamp = signData[0];
     String regionName = signData[1];
     String serviceName = signData[2];
-    byte[] kDate = sign(("AWS4" + key).getBytes(UTF_8), dateStamp);
+    byte[] kDate = sign(("AWS4" + key)
+        .getBytes(StandardCharsets.UTF_8), dateStamp);
     byte[] kRegion = sign(kDate, regionName);
     byte[] kService = sign(kRegion, serviceName);
     byte[] kSigning = sign(kService, "aws4_request");
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/AWSSignatureProcessor.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/AWSSignatureProcessor.java
index 4d45101..26c1a3e 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/AWSSignatureProcessor.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/AWSSignatureProcessor.java
@@ -28,6 +28,7 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URLEncoder;
 import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.time.LocalDate;
@@ -54,6 +55,7 @@ import org.apache.kerby.util.Hex;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+
 /**
  * Parser to process AWS V2 & V4 auth request. Creates string to sign and auth
  * header. For more details refer to AWS documentation https://docs.aws
@@ -309,7 +311,7 @@ public class AWSSignatureProcessor implements SignatureProcessor {
   private String urlEncode(String str) {
     try {
 
-      return URLEncoder.encode(str, UTF_8.name())
+      return URLEncoder.encode(str, StandardCharsets.UTF_8.name())
           .replaceAll("\\+", "%20")
           .replaceAll("%7E", "~");
     } catch (UnsupportedEncodingException e) {
@@ -340,7 +342,7 @@ public class AWSSignatureProcessor implements SignatureProcessor {
 
   public static String hash(String payload) throws NoSuchAlgorithmException {
     MessageDigest md = MessageDigest.getInstance("SHA-256");
-    md.update(payload.getBytes(UTF_8));
+    md.update(payload.getBytes(StandardCharsets.UTF_8));
     return Hex.encode(md.digest()).toLowerCase();
   }
 
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/OzoneClientProducer.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/OzoneClientProducer.java
index 364d263..04bc950 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/OzoneClientProducer.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/OzoneClientProducer.java
@@ -36,8 +36,8 @@ import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto.Type.S3AUTHINFO;
-import static org.apache.hadoop.ozone.s3.SignatureProcessor.UTF_8;
 import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.INTERNAL_ERROR;
 import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.MALFORMED_HEADER;
 import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.S3_AUTHINFO_CREATION_ERROR;
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/SignatureProcessor.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/SignatureProcessor.java
index e3cb6af..5e2e3fb 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/SignatureProcessor.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/SignatureProcessor.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.ozone.s3;
 
-import java.nio.charset.Charset;
 import java.time.ZoneOffset;
 import java.time.format.DateTimeFormatter;
 
@@ -32,7 +31,6 @@ public interface SignatureProcessor {
   String X_AMAZ_DATE = "X-Amz-Date";
   String CONTENT_MD5 = "content-md5";
   String AUTHORIZATION_HEADER = "Authorization";
-  Charset UTF_8 = Charset.forName("utf-8");
   String X_AMZ_CONTENT_SHA256 = "X-Amz-Content-SHA256";
   String HOST = "host";
 
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestSignedChunksInputStream.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestSignedChunksInputStream.java
index 3599c05..8dcfe59 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestSignedChunksInputStream.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestSignedChunksInputStream.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.s3;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.commons.io.IOUtils;
 import org.junit.Assert;
@@ -36,14 +36,14 @@ public class TestSignedChunksInputStream {
     InputStream is = fileContent("0;chunk-signature"
         +
         "=23abb2bd920ddeeaac78a63ed808bc59fa6e7d3ef0e356474b82cdc2f8c93c40");
-    String result = IOUtils.toString(is, Charset.forName("UTF-8"));
+    String result = IOUtils.toString(is, StandardCharsets.UTF_8);
     Assert.assertEquals("", result);
 
     is = fileContent("0;chunk-signature"
         +
         "=23abb2bd920ddeeaac78a63ed808bc59fa6e7d3ef0e356474b82cdc2f8c93c40\r"
         + "\n");
-    result = IOUtils.toString(is, Charset.forName("UTF-8"));
+    result = IOUtils.toString(is, StandardCharsets.UTF_8);
     Assert.assertEquals("", result);
   }
 
@@ -54,7 +54,7 @@ public class TestSignedChunksInputStream {
         +
         "=23abb2bd920ddeeaac78a63ed808bc59fa6e7d3ef0e356474b82cdc2f8c93c40\r"
         + "\n1234567890\r\n");
-    String result = IOUtils.toString(is, Charset.forName("UTF-8"));
+    String result = IOUtils.toString(is, StandardCharsets.UTF_8);
     Assert.assertEquals("1234567890", result);
 
     //test read(byte[],int,int)
@@ -74,7 +74,7 @@ public class TestSignedChunksInputStream {
         +
         "=23abb2bd920ddeeaac78a63ed808bc59fa6e7d3ef0e356474b82cdc2f8c93c40\r"
         + "\n1234567890");
-    String result = IOUtils.toString(is, Charset.forName("UTF-8"));
+    String result = IOUtils.toString(is, StandardCharsets.UTF_8);
     Assert.assertEquals("1234567890", result);
 
     //test read(byte[],int,int)
@@ -94,7 +94,7 @@ public class TestSignedChunksInputStream {
         + "1234567890\r\n"
         + "05;chunk-signature=signature\r\n"
         + "abcde\r\n");
-    String result = IOUtils.toString(is, Charset.forName("UTF-8"));
+    String result = IOUtils.toString(is, StandardCharsets.UTF_8);
     Assert.assertEquals("1234567890abcde", result);
 
     //test read(byte[],int,int)
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreReads.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreReads.java
index bf40ebc..b810da2 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreReads.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreReads.java
@@ -27,7 +27,7 @@ import org.openjdk.jmh.annotations.State;
 import org.openjdk.jmh.infra.Blackhole;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 import static org.apache.hadoop.ozone.genesis.GenesisUtil.CACHE_10MB_TYPE;
 import static org.apache.hadoop.ozone.genesis.GenesisUtil.CACHE_1GB_TYPE;
@@ -52,9 +52,9 @@ public class BenchMarkMetadataStoreReads {
   public void initialize() throws IOException {
     store = GenesisUtil.getMetadataStore(this.type);
     byte[] data = RandomStringUtils.randomAlphanumeric(DATA_LEN)
-        .getBytes(Charset.forName("UTF-8"));
+        .getBytes(StandardCharsets.UTF_8);
     for (int x = 0; x < MAX_KEYS; x++) {
-      store.put(Long.toHexString(x).getBytes(Charset.forName("UTF-8")), data);
+      store.put(Long.toHexString(x).getBytes(StandardCharsets.UTF_8), data);
     }
     if (type.compareTo(CLOSED_TYPE) == 0) {
       store.compactDB();
@@ -65,6 +65,6 @@ public class BenchMarkMetadataStoreReads {
   public void test(Blackhole bh) throws IOException {
     long x = org.apache.commons.lang3.RandomUtils.nextLong(0L, MAX_KEYS);
     bh.consume(
-        store.get(Long.toHexString(x).getBytes(Charset.forName("UTF-8"))));
+        store.get(Long.toHexString(x).getBytes(StandardCharsets.UTF_8)));
   }
 }
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreWrites.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreWrites.java
index aa7aedd..51010ec 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreWrites.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkMetadataStoreWrites.java
@@ -26,7 +26,7 @@ import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 import static org.apache.hadoop.ozone.genesis.GenesisUtil.CACHE_10MB_TYPE;
 import static org.apache.hadoop.ozone.genesis.GenesisUtil.CACHE_1GB_TYPE;
@@ -50,13 +50,13 @@ public class BenchMarkMetadataStoreWrites {
   @Setup
   public void initialize() throws IOException {
     data = RandomStringUtils.randomAlphanumeric(DATA_LEN)
-        .getBytes(Charset.forName("UTF-8"));
+        .getBytes(StandardCharsets.UTF_8);
     store = GenesisUtil.getMetadataStore(this.type);
   }
 
   @Benchmark
   public void test() throws IOException {
     long x = org.apache.commons.lang3.RandomUtils.nextLong(0L, MAX_KEYS);
-    store.put(Long.toHexString(x).getBytes(Charset.forName("UTF-8")), data);
+    store.put(Long.toHexString(x).getBytes(StandardCharsets.UTF_8), data);
   }
 }
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkRocksDbStore.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkRocksDbStore.java
index daf44ec..9f79b82 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkRocksDbStore.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkRocksDbStore.java
@@ -28,7 +28,7 @@ import org.rocksdb.*;
 
 import java.io.File;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Paths;
 
 /**
@@ -68,7 +68,7 @@ public class BenchMarkRocksDbStore {
   @Setup(Level.Trial)
   public void initialize() throws IOException {
     data = RandomStringUtils.randomAlphanumeric(DATA_LEN)
-        .getBytes(Charset.forName("UTF-8"));
+        .getBytes(StandardCharsets.UTF_8);
     org.rocksdb.Options opts = new org.rocksdb.Options();
     File dbFile = Paths.get(System.getProperty(TMP_DIR))
         .resolve(RandomStringUtils.randomNumeric(DB_FILE_LEN))
@@ -112,8 +112,8 @@ public class BenchMarkRocksDbStore {
   @Benchmark
   public void test(Blackhole bh) throws IOException {
     long x = org.apache.commons.lang3.RandomUtils.nextLong(0L, MAX_KEYS);
-    store.put(Long.toHexString(x).getBytes(Charset.forName("UTF-8")), data);
+    store.put(Long.toHexString(x).getBytes(StandardCharsets.UTF_8), data);
     bh.consume(
-        store.get(Long.toHexString(x).getBytes(Charset.forName("UTF-8"))));
+        store.get(Long.toHexString(x).getBytes(StandardCharsets.UTF_8)));
   }
 }
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java
index 76b32b2..71039f4 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java
@@ -45,7 +45,6 @@ import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
-import java.nio.charset.Charset;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -72,7 +71,6 @@ public class SQLCLI  extends Configured implements Tool {
 
   private Options options;
   private BasicParser parser;
-  private final Charset encoding = Charset.forName("UTF-8");
   private final OzoneConfiguration conf;
 
   // for container.db


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@ozone.apache.org
For additional commands, e-mail: commits-help@ozone.apache.org