Posted to common-commits@hadoop.apache.org by st...@apache.org on 2022/04/27 09:30:34 UTC

[hadoop] branch branch-3.3 updated: HADOOP-17956. Replace all default Charset usage with UTF-8 (#3529)

This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
     new bb13e228bc2 HADOOP-17956. Replace all default Charset usage with UTF-8 (#3529)
bb13e228bc2 is described below

commit bb13e228bc221074d726f6e6bc8e318573f77b8d
Author: Viraj Jasani <vj...@apache.org>
AuthorDate: Thu Oct 14 09:37:24 2021 +0530

    HADOOP-17956. Replace all default Charset usage with UTF-8 (#3529)
    
    Change-Id: I0094a84619ce19acf340d8dd1040cfe9bd88184e
    Signed-off-by: Akira Ajisaka <aa...@apache.org>
---
 .../java/org/apache/hadoop/security/KDiag.java     |  4 ++--
 .../org/apache/hadoop/security/ProviderUtils.java  |  5 ++---
 .../hadoop/security/ShellBasedIdMapping.java       |  4 +---
 .../org/apache/hadoop/util/ReflectionUtils.java    |  6 +++---
 .../main/java/org/apache/hadoop/util/Shell.java    | 10 ++++-----
 .../apache/hadoop/fs/shell/TestTextCommand.java    |  4 ++--
 .../java/org/apache/hadoop/security/TestKDiag.java |  4 ++--
 .../delegation/web/TestWebDelegationToken.java     | 25 +++++++++-------------
 .../java/org/apache/hadoop/test/StatUtils.java     |  9 ++++----
 .../hadoop/util/TestApplicationClassLoader.java    |  4 ++--
 .../secure/AbstractSecureRegistryTest.java         |  4 ++--
 .../hadoop/registry/secure/TestSecureLogins.java   |  8 +++----
 .../hadoop/fs/http/server/TestHttpFSServer.java    |  5 ++---
 .../server/diskbalancer/command/CancelCommand.java |  4 ++--
 .../diskbalancer/command/ExecuteCommand.java       |  4 ++--
 .../hadoop/fs/shell/TestHdfsTextCommand.java       |  4 ++--
 .../TestOfflineImageViewerForXAttr.java            | 14 +++++-------
 .../org/apache/hadoop/hdfs/web/TestWebHDFS.java    |  3 +--
 .../hadoop/mapred/TestJavaSerialization.java       |  5 ++---
 .../mapred/uploader/TestFrameworkUploader.java     |  5 ++---
 .../maven/plugin/resourcegz/ResourceGzMojo.java    |  4 ++--
 .../apache/hadoop/tools/TestHadoopArchiveLogs.java |  5 ++---
 .../hadoop/fs/s3a/TestS3AInputStreamRetry.java     |  5 ++---
 .../hadoop/fs/s3a/select/ITestS3SelectCLI.java     |  5 ++---
 .../hadoop/yarn/service/ServiceScheduler.java      |  4 ++--
 .../yarn/service/provider/ProviderUtils.java       |  5 ++---
 .../yarn/util/DockerClientConfigHandler.java       |  2 +-
 .../resolver/DefaultSubClusterResolverImpl.java    |  4 ++--
 .../hadoop/yarn/server/webapp/LogServlet.java      |  5 +++--
 .../yarn/server/nodemanager/ContainerExecutor.java |  5 ++---
 .../resources/CGroupElasticMemoryController.java   |  7 +++---
 .../server/nodemanager/TestContainerExecutor.java  | 10 ++++-----
 .../TestCGroupElasticMemoryController.java         | 20 +++++++----------
 .../placement/TestPlacementRuleFS.java             |  5 ++---
 34 files changed, 93 insertions(+), 124 deletions(-)
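
The change is mechanical across all 34 files: wherever a read or write previously fell back to Charset.defaultCharset(), it now names StandardCharsets.UTF_8 explicitly, so behaviour no longer depends on the JVM's platform encoding (file.encoding / default locale). A minimal before/after sketch of the pattern, using commons-io IOUtils and a hypothetical file name:

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.commons.io.IOUtils;

public class Utf8PatternSketch {
  public static void main(String[] args) throws IOException {
    try (InputStream in = Files.newInputStream(Paths.get("example.txt"))) {
      // Before: result depended on the platform default charset, so the
      // same bytes could decode differently on different hosts:
      //   String text = IOUtils.toString(in, Charset.defaultCharset());
      // After: decoding is pinned to UTF-8 everywhere.
      String text = IOUtils.toString(in, StandardCharsets.UTF_8);
      System.out.println(text);
    }
  }
}

As a side benefit, passing a Charset constant instead of a charset name string is checked at compile time and, in most JDK APIs, drops the checked UnsupportedEncodingException.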

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
index ddadfbf2171..b2797871339 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
@@ -46,7 +46,7 @@ import java.io.InputStream;
 import java.io.PrintWriter;
 import java.lang.reflect.InvocationTargetException;
 import java.net.InetAddress;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
@@ -924,7 +924,7 @@ public class KDiag extends Configured implements Tool, Closeable {
    */
   private void dump(File file) throws IOException {
     try (InputStream in = Files.newInputStream(file.toPath())) {
-      for (String line : IOUtils.readLines(in, Charset.defaultCharset())) {
+      for (String line : IOUtils.readLines(in, StandardCharsets.UTF_8)) {
         println("%s", line);
       }
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
index 86e065e04d1..b51b030e020 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
@@ -23,7 +23,7 @@ import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.io.IOUtils;
@@ -225,8 +225,7 @@ public final class ProviderUtils {
           throw new IOException("Password file does not exist");
         }
         try (InputStream is = pwdFile.openStream()) {
-          pass = IOUtils.toString(is, Charset.defaultCharset()).trim()
-              .toCharArray();
+          pass = IOUtils.toString(is, StandardCharsets.UTF_8).trim().toCharArray();
         }
       }
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
index e517bad4bb4..91b40bf0d1d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
@@ -21,7 +21,6 @@ import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
-import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.HashMap;
@@ -222,8 +221,7 @@ public class ShellBasedIdMapping implements IdMappingServiceProvider {
       Process process = Runtime.getRuntime().exec(
           new String[] { "bash", "-c", command });
       br = new BufferedReader(
-          new InputStreamReader(process.getInputStream(),
-                                Charset.defaultCharset()));
+          new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8));
       String line = null;
       while ((line = br.readLine()) != null) {
         String[] nameId = line.split(regex);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
index 1ae71d187d3..c6a77109f0e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
@@ -29,7 +29,7 @@ import java.lang.management.ThreadMXBean;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -224,7 +224,7 @@ public class ReflectionUtils {
         try {
           ByteArrayOutputStream buffer = new ByteArrayOutputStream();
           printThreadInfo(new PrintStream(buffer, false, "UTF-8"), title);
-          log.info(buffer.toString(Charset.defaultCharset().name()));
+          log.info(buffer.toString(StandardCharsets.UTF_8.name()));
         } catch (UnsupportedEncodingException ignored) {
         }
       }
@@ -253,7 +253,7 @@ public class ReflectionUtils {
         try {
           ByteArrayOutputStream buffer = new ByteArrayOutputStream();
           printThreadInfo(new PrintStream(buffer, false, "UTF-8"), title);
-          log.info(buffer.toString(Charset.defaultCharset().name()));
+          log.info(buffer.toString(StandardCharsets.UTF_8.name()));
         } catch (UnsupportedEncodingException ignored) {
         }
       }
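
The two ReflectionUtils hunks above fix a genuine encode/decode mismatch rather than a style issue: printThreadInfo writes the buffer through a PrintStream constructed with "UTF-8", but the buffer was then decoded with Charset.defaultCharset().name(). On a host whose default encoding is not UTF-8, non-ASCII thread names would be garbled in the log. A small sketch of why both sides must agree (hypothetical class, standard JDK APIs only):

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

public class RoundTripSketch {
  public static void main(String[] args) throws UnsupportedEncodingException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    // Encode side: bytes are written as UTF-8.
    PrintStream out = new PrintStream(buffer, false, "UTF-8");
    out.println("thread-\u00e9");
    out.flush();
    // Decode side must match: reading these bytes with a non-UTF-8
    // default charset (e.g. windows-1252) would mangle "\u00e9".
    System.out.println(buffer.toString(StandardCharsets.UTF_8.name()));
  }
}
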
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
index 650fc7e8571..4a1dcd563a5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
@@ -23,7 +23,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.InterruptedIOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
@@ -948,11 +948,11 @@ public abstract class Shell {
       timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
     }
     final BufferedReader errReader =
-            new BufferedReader(new InputStreamReader(
-                process.getErrorStream(), Charset.defaultCharset()));
+            new BufferedReader(new InputStreamReader(process.getErrorStream(),
+                StandardCharsets.UTF_8));
     BufferedReader inReader =
-            new BufferedReader(new InputStreamReader(
-                process.getInputStream(), Charset.defaultCharset()));
+            new BufferedReader(new InputStreamReader(process.getInputStream(),
+                StandardCharsets.UTF_8));
     final StringBuffer errMsg = new StringBuffer();
 
     // read error and input streams as this would free up the buffers
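
One caveat the Shell change accepts: the bytes a child process writes to stdout/stderr are produced under that process's own locale, so decoding them as UTF-8 is a convention here rather than a guarantee. The gain is determinism: Hadoop now decodes subprocess output the same way on every host, instead of inheriting whatever file.encoding the JVM happened to start with.
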
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
index a9e7f57a292..c99b97e6e40 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
@@ -27,7 +27,7 @@ import java.io.InputStream;
 import java.io.StringWriter;
 import java.lang.reflect.Method;
 import java.net.URI;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 
@@ -125,7 +125,7 @@ public class TestTextCommand {
 
   private String inputStreamToString(InputStream stream) throws IOException {
     StringWriter writer = new StringWriter();
-    IOUtils.copy(stream, writer, Charset.defaultCharset());
+    IOUtils.copy(stream, writer, StandardCharsets.UTF_8);
     return writer.toString();
   }
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestKDiag.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestKDiag.java
index ecc71cf0950..706701f67bd 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestKDiag.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestKDiag.java
@@ -36,7 +36,7 @@ import org.slf4j.LoggerFactory;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Properties;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
@@ -235,7 +235,7 @@ public class TestKDiag extends Assert {
    */
   private void dump(File file) throws IOException {
     try (FileInputStream in = new FileInputStream(file)) {
-      for (String line : IOUtils.readLines(in, Charset.defaultCharset())) {
+      for (String line : IOUtils.readLines(in, StandardCharsets.UTF_8)) {
         LOG.info(line);
       }
     }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java
index 366c114fc1f..69e252222be 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java
@@ -64,7 +64,7 @@ import java.io.IOException;
 import java.io.Writer;
 import java.net.HttpURLConnection;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.security.Principal;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
@@ -555,8 +555,7 @@ public class TestWebDelegationToken {
           HttpURLConnection conn = aUrl.openConnection(url, token);
           Assert.assertEquals(HttpURLConnection.HTTP_OK,
               conn.getResponseCode());
-          List<String> ret = IOUtils.readLines(conn.getInputStream(),
-              Charset.defaultCharset());
+          List<String> ret = IOUtils.readLines(conn.getInputStream(), StandardCharsets.UTF_8);
           Assert.assertEquals(1, ret.size());
           Assert.assertEquals(FOO_USER, ret.get(0));
 
@@ -626,8 +625,7 @@ public class TestWebDelegationToken {
           HttpURLConnection conn = aUrl.openConnection(url, token);
           Assert.assertEquals(HttpURLConnection.HTTP_OK,
               conn.getResponseCode());
-          List<String> ret = IOUtils
-              .readLines(conn.getInputStream(), Charset.defaultCharset());
+          List<String> ret = IOUtils.readLines(conn.getInputStream(), StandardCharsets.UTF_8);
           Assert.assertEquals(1, ret.size());
           Assert.assertEquals(FOO_USER, ret.get(0));
 
@@ -851,15 +849,14 @@ public class TestWebDelegationToken {
       HttpURLConnection conn = 
           (HttpURLConnection) new URL(strUrl).openConnection();
       Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-      List<String> ret =
-          IOUtils.readLines(conn.getInputStream(), Charset.defaultCharset());
+      List<String> ret = IOUtils.readLines(conn.getInputStream(), StandardCharsets.UTF_8);
       Assert.assertEquals(1, ret.size());
       Assert.assertEquals(OK_USER, ret.get(0));
       strUrl = String.format("%s?user.name=%s&DOAS=%s", url.toExternalForm(), 
           FOO_USER, OK_USER);
       conn = (HttpURLConnection) new URL(strUrl).openConnection();
       Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-      ret = IOUtils.readLines(conn.getInputStream(), Charset.defaultCharset());
+      ret = IOUtils.readLines(conn.getInputStream(), StandardCharsets.UTF_8);
       Assert.assertEquals(1, ret.size());
       Assert.assertEquals(OK_USER, ret.get(0));
 
@@ -877,7 +874,7 @@ public class TestWebDelegationToken {
           Assert.assertEquals(HttpURLConnection.HTTP_OK,
               conn.getResponseCode());
           List<String> ret = IOUtils
-              .readLines(conn.getInputStream(), Charset.defaultCharset());
+              .readLines(conn.getInputStream(), StandardCharsets.UTF_8);
           Assert.assertEquals(1, ret.size());
           Assert.assertEquals(OK_USER, ret.get(0));
 
@@ -898,7 +895,7 @@ public class TestWebDelegationToken {
           Assert.assertEquals(HttpURLConnection.HTTP_OK,
               conn.getResponseCode());
           ret = IOUtils
-              .readLines(conn.getInputStream(), Charset.defaultCharset());
+              .readLines(conn.getInputStream(), StandardCharsets.UTF_8);
           Assert.assertEquals(1, ret.size());
           Assert.assertEquals(FOO_USER, ret.get(0));
 
@@ -960,7 +957,7 @@ public class TestWebDelegationToken {
           Assert.assertEquals(HttpURLConnection.HTTP_OK,
               conn.getResponseCode());
           List<String> ret = IOUtils
-              .readLines(conn.getInputStream(), Charset.defaultCharset());
+              .readLines(conn.getInputStream(), StandardCharsets.UTF_8);
           Assert.assertEquals(1, ret.size());
           Assert.assertEquals("remoteuser=" + FOO_USER+ ":ugi=" + FOO_USER, 
               ret.get(0));
@@ -969,8 +966,7 @@ public class TestWebDelegationToken {
           conn = aUrl.openConnection(url, token, OK_USER);
           Assert.assertEquals(HttpURLConnection.HTTP_OK,
               conn.getResponseCode());
-          ret = IOUtils
-              .readLines(conn.getInputStream(), Charset.defaultCharset());
+          ret = IOUtils.readLines(conn.getInputStream(), StandardCharsets.UTF_8);
           Assert.assertEquals(1, ret.size());
           Assert.assertEquals("realugi=" + FOO_USER +":remoteuser=" + OK_USER + 
                   ":ugi=" + OK_USER, ret.get(0));
@@ -1022,8 +1018,7 @@ public class TestWebDelegationToken {
           HttpURLConnection conn = aUrl.openConnection(url, token, OK_USER);
           Assert.assertEquals(HttpURLConnection.HTTP_OK,
                   conn.getResponseCode());
-          List<String> ret = IOUtils
-              .readLines(conn.getInputStream(), Charset.defaultCharset());
+          List<String> ret = IOUtils.readLines(conn.getInputStream(), StandardCharsets.UTF_8);
           Assert.assertEquals(1, ret.size());
           Assert.assertEquals("realugi=" + FOO_USER +":remoteuser=" + OK_USER +
                   ":ugi=" + OK_USER, ret.get(0));
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/StatUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/StatUtils.java
index fef35d0561c..8da6df88c2b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/StatUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/StatUtils.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.util.Shell;
 
 import java.io.BufferedReader;
 import java.io.InputStreamReader;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -113,10 +113,9 @@ public class StatUtils {
     ExecutorService executorService = Executors.newSingleThreadExecutor();
     executorService.awaitTermination(2000, TimeUnit.MILLISECONDS);
     try {
-      Future<String> future =
-          executorService.submit(() -> new BufferedReader(
-              new InputStreamReader(process.getInputStream(),
-                  Charset.defaultCharset())).lines().findFirst().orElse(""));
+      Future<String> future = executorService.submit(() -> new BufferedReader(
+          new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8)).lines()
+          .findFirst().orElse(""));
       return future.get();
     } finally {
       process.destroy();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java
index 04ef41319f4..ba206368913 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java
@@ -31,7 +31,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.jar.JarOutputStream;
 import java.util.zip.ZipEntry;
@@ -136,7 +136,7 @@ public class TestApplicationClassLoader {
 
     InputStream in = appClassloader.getResourceAsStream("resource.txt");
     assertNotNull("Resource should not be null for app classloader", in);
-    assertEquals("hello", IOUtils.toString(in, Charset.defaultCharset()));
+    assertEquals("hello", IOUtils.toString(in, StandardCharsets.UTF_8));
   }
   
   private File makeTestJar() throws IOException {
diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/AbstractSecureRegistryTest.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/AbstractSecureRegistryTest.java
index 75b6fb287d9..a510f84bd94 100644
--- a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/AbstractSecureRegistryTest.java
+++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/AbstractSecureRegistryTest.java
@@ -49,7 +49,7 @@ import javax.security.auth.login.LoginException;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.security.Principal;
 import java.util.HashSet;
 import java.util.Properties;
@@ -220,7 +220,7 @@ public class AbstractSecureRegistryTest extends RegistryTestHelper {
         BOB_LOCALHOST, keytab_bob));
 
     jaasFile = new File(kdcWorkDir, "jaas.txt");
-    FileUtils.write(jaasFile, jaas.toString(), Charset.defaultCharset());
+    FileUtils.write(jaasFile, jaas.toString(), StandardCharsets.UTF_8);
     LOG.info("\n"+ jaas);
     RegistrySecurity.bindJVMtoJAASFile(jaasFile);
   }
diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/TestSecureLogins.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/TestSecureLogins.java
index 1cdc47d562d..52d677e00a5 100644
--- a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/TestSecureLogins.java
+++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/TestSecureLogins.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.registry.secure;
 import java.io.File;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.Method;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.security.Principal;
 import java.security.PrivilegedExceptionAction;
 import java.util.HashMap;
@@ -93,8 +93,7 @@ public class TestSecureLogins extends AbstractSecureRegistryTest {
       logLoginDetails(ALICE_LOCALHOST, client);
       String confFilename = System.getProperty(Environment.JAAS_CONF_KEY);
       assertNotNull("Unset: "+ Environment.JAAS_CONF_KEY, confFilename);
-      String config = FileUtils.readFileToString(new File(confFilename),
-          Charset.defaultCharset());
+      String config = FileUtils.readFileToString(new File(confFilename), StandardCharsets.UTF_8);
       LOG.info("{}=\n{}", confFilename, config);
       RegistrySecurity.setZKSaslClientProperties(ALICE, ALICE_CLIENT_CONTEXT);
     } finally {
@@ -133,8 +132,7 @@ public class TestSecureLogins extends AbstractSecureRegistryTest {
   @Test
   public void testKerberosAuth() throws Throwable {
     File krb5conf = getKdc().getKrb5conf();
-    String krbConfig = FileUtils.readFileToString(krb5conf,
-        Charset.defaultCharset());
+    String krbConfig = FileUtils.readFileToString(krb5conf, StandardCharsets.UTF_8);
     LOG.info("krb5.conf at {}:\n{}", krb5conf, krbConfig);
     Subject subject = new Subject();
     Class<?> kerb5LoginClass =
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
index 2f0ef9ab23c..6ecc33a587c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
@@ -56,7 +56,7 @@ import java.io.Writer;
 import java.net.HttpURLConnection;
 import java.net.URI;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -1655,8 +1655,7 @@ public class TestHttpFSServer extends HFSTestCase {
     conn.connect();
     // Verify that we read what we wrote
     Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-    String content = IOUtils.toString(
-        conn.getInputStream(), Charset.defaultCharset());
+    String content = IOUtils.toString(conn.getInputStream(), StandardCharsets.UTF_8);
     Assert.assertEquals(testContent, content);
 
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/CancelCommand.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/CancelCommand.java
index e3b0645130e..f478dff4af9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/CancelCommand.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/CancelCommand.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hdfs.server.diskbalancer.planner.NodePlan;
 import org.apache.hadoop.hdfs.tools.DiskBalancerCLI;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 /**
  * Cancels a running plan.
@@ -77,7 +77,7 @@ public class CancelCommand extends Command {
           "Invalid plan file specified.");
       String planData = null;
       try (FSDataInputStream plan = open(planFile)) {
-        planData = IOUtils.toString(plan, Charset.defaultCharset());
+        planData = IOUtils.toString(plan, StandardCharsets.UTF_8);
       }
       cancelPlan(planData);
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/ExecuteCommand.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/ExecuteCommand.java
index 88297453fbd..5b5dc2ad5b4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/ExecuteCommand.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/ExecuteCommand.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hdfs.server.diskbalancer.planner.NodePlan;
 import org.apache.hadoop.hdfs.tools.DiskBalancerCLI;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 /**
  * executes a given plan.
@@ -69,7 +69,7 @@ public class ExecuteCommand extends Command {
 
     String planData = null;
     try (FSDataInputStream plan = open(planFile)) {
-      planData = IOUtils.toString(plan, Charset.defaultCharset());
+      planData = IOUtils.toString(plan, StandardCharsets.UTF_8);
     }
 
     boolean skipDateCheck = false;
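
For the two disk balancer command hunks above, UTF-8 is arguably the correct charset as well as a deterministic one: the plan files being read are JSON documents, and JSON text exchanged between systems is specified (RFC 8259) to be UTF-8 encoded.
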
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/shell/TestHdfsTextCommand.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/shell/TestHdfsTextCommand.java
index cb7773c8389..57e7cf415f6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/shell/TestHdfsTextCommand.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/shell/TestHdfsTextCommand.java
@@ -25,7 +25,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.StringWriter;
 import java.lang.reflect.Method;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -107,7 +107,7 @@ public class TestHdfsTextCommand {
 
   private String inputStreamToString(InputStream stream) throws IOException {
     StringWriter writer = new StringWriter();
-    IOUtils.copy(stream, writer, Charset.defaultCharset());
+    IOUtils.copy(stream, writer, StandardCharsets.UTF_8);
     return writer.toString();
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewerForXAttr.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewerForXAttr.java
index f402017b9c4..6c7d7b78963 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewerForXAttr.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewerForXAttr.java
@@ -25,7 +25,7 @@ import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.URI;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -126,8 +126,7 @@ public class TestOfflineImageViewerForXAttr {
 
       assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
 
-      String content = IOUtils
-          .toString(connection.getInputStream(), Charset.defaultCharset());
+      String content = IOUtils.toString(connection.getInputStream(), StandardCharsets.UTF_8);
 
       assertTrue("Missing user.attr1 in response ",
           content.contains("user.attr1"));
@@ -152,8 +151,7 @@ public class TestOfflineImageViewerForXAttr {
       connection.connect();
 
       assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
-      String content = IOUtils
-          .toString(connection.getInputStream(), Charset.defaultCharset());
+      String content = IOUtils.toString(connection.getInputStream(), StandardCharsets.UTF_8);
 
       assertTrue("Missing user.attr1 in response ",
           content.contains("user.attr1"));
@@ -186,8 +184,7 @@ public class TestOfflineImageViewerForXAttr {
       connection.connect();
 
       assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
-      String content = IOUtils
-          .toString(connection.getInputStream(), Charset.defaultCharset());
+      String content = IOUtils.toString(connection.getInputStream(), StandardCharsets.UTF_8);
       assertEquals(attr1JSon, content);
     }
   }
@@ -209,8 +206,7 @@ public class TestOfflineImageViewerForXAttr {
       connection.connect();
 
       assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
-      String content = IOUtils
-          .toString(connection.getInputStream(), Charset.defaultCharset());
+      String content = IOUtils.toString(connection.getInputStream(), StandardCharsets.UTF_8);
       assertEquals(attr1JSon, content);
 
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
index 847fae5de9b..7919a0ca63d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
@@ -48,7 +48,6 @@ import java.net.SocketTimeoutException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
-import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
@@ -1489,7 +1488,7 @@ public class TestWebHDFS {
     conn.setRequestMethod(TYPE);
     conn.setInstanceFollowRedirects(false);
     String response =
-        IOUtils.toString(conn.getInputStream(), Charset.defaultCharset());
+        IOUtils.toString(conn.getInputStream(), StandardCharsets.UTF_8);
     LOG.info("Response was : " + response);
     Assert.assertEquals(
       "Response wasn't " + HttpURLConnection.HTTP_OK,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
index 414b3ca30d2..371a07c17ce 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
@@ -23,7 +23,7 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Iterator;
 import java.util.StringTokenizer;
 
@@ -135,8 +135,7 @@ public class TestJavaSerialization {
           new Utils.OutputFileUtils.OutputFilesFilter()));
     assertEquals(1, outputFiles.length);
     try (InputStream is = fs.open(outputFiles[0])) {
-      String reduceOutput =
-          org.apache.commons.io.IOUtils.toString(is, Charset.defaultCharset());
+      String reduceOutput = org.apache.commons.io.IOUtils.toString(is, StandardCharsets.UTF_8);
       String[] lines = reduceOutput.split("\n");
       assertEquals("Unexpected output; received output '" + reduceOutput + "'",
           "a\t1", lines[0]);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/test/java/org/apache/hadoop/mapred/uploader/TestFrameworkUploader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/test/java/org/apache/hadoop/mapred/uploader/TestFrameworkUploader.java
index a8b6bb60790..8bf3bb0c293 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/test/java/org/apache/hadoop/mapred/uploader/TestFrameworkUploader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/test/java/org/apache/hadoop/mapred/uploader/TestFrameworkUploader.java
@@ -44,7 +44,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.HashMap;
@@ -437,8 +437,7 @@ public class TestFrameworkUploader {
       // Create a target file
       File targetFile = new File(parent, "a.txt");
       try(FileOutputStream os = new FileOutputStream(targetFile)) {
-        IOUtils.writeLines(Lists.newArrayList("a", "b"), null, os,
-            Charset.defaultCharset());
+        IOUtils.writeLines(Lists.newArrayList("a", "b"), null, os, StandardCharsets.UTF_8);
       }
       Assert.assertFalse(uploader.checkSymlink(targetFile));
 
diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/resourcegz/ResourceGzMojo.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/resourcegz/ResourceGzMojo.java
index efa714820b9..b5f3939e75a 100644
--- a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/resourcegz/ResourceGzMojo.java
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/resourcegz/ResourceGzMojo.java
@@ -25,7 +25,7 @@ import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.List;
@@ -114,7 +114,7 @@ public class ResourceGzMojo extends AbstractMojo {
               BufferedReader is = Files.newBufferedReader(path)
           ) {
             getLog().info("Compressing " + path + " to " + outFile);
-            IOUtils.copy(is, os, Charset.defaultCharset());
+            IOUtils.copy(is, os, StandardCharsets.UTF_8);
           }
         } else {
           throw new IOException("Directory " + outFile.getParent()
diff --git a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
index e324f1dedd0..b475cc25e8e 100644
--- a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
+++ b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
@@ -42,7 +42,7 @@ import org.junit.Test;
 
 import java.io.File;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Random;
 
 public class TestHadoopArchiveLogs {
@@ -279,8 +279,7 @@ public class TestHadoopArchiveLogs {
     Assert.assertFalse(localScript.exists());
     hal.generateScript(localScript);
     Assert.assertTrue(localScript.exists());
-    String script =
-        IOUtils.toString(localScript.toURI(), Charset.defaultCharset());
+    String script = IOUtils.toString(localScript.toURI(), StandardCharsets.UTF_8);
     String[] lines = script.split("\n");
     Assert.assertEquals(22, lines.length);
     Assert.assertEquals("#!/bin/bash", lines[0]);
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java
index a1e56c3ce4e..db5b5b56851 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.fs.s3a;
 import javax.net.ssl.SSLException;
 import java.io.IOException;
 import java.net.SocketException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 import com.amazonaws.SdkClientException;
 import com.amazonaws.services.s3.model.GetObjectRequest;
@@ -200,8 +200,7 @@ public class TestS3AInputStreamRetry extends AbstractS3AMockTest {
    * @return mocked object.
    */
   private S3ObjectInputStream getMockedInputStream(boolean triggerFailure) {
-    return new S3ObjectInputStream(
-        IOUtils.toInputStream(INPUT, Charset.defaultCharset()), null) {
+    return new S3ObjectInputStream(IOUtils.toInputStream(INPUT, StandardCharsets.UTF_8), null) {
 
       private final IOException exception =
           new SSLException(new SocketException("Connection reset"));
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/select/ITestS3SelectCLI.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/select/ITestS3SelectCLI.java
index a29abfdf639..b22d2f53b4b 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/select/ITestS3SelectCLI.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/select/ITestS3SelectCLI.java
@@ -22,7 +22,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 import org.junit.Assume;
@@ -168,8 +168,7 @@ public class ITestS3SelectCLI extends AbstractS3SelectTest {
         o(OPT_OUTPUT), localFile.toString(),
         landsatSrc,
         SELECT_SUNNY_ROWS_NO_LIMIT);
-    List<String> lines = IOUtils.readLines(new FileInputStream(localFile),
-        Charset.defaultCharset());
+    List<String> lines = IOUtils.readLines(new FileInputStream(localFile), StandardCharsets.UTF_8);
     LOG.info("Result from select:\n{}", lines.get(0));
     assertEquals(lineCount, lines.size());
     selectCount.assertDiffEquals("select count", 1);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
index 1bbf0f2c157..6d143a3bd4a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
@@ -97,7 +97,7 @@ import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.text.MessageFormat;
 import java.util.Collection;
 import java.util.HashMap;
@@ -549,7 +549,7 @@ public class ServiceScheduler extends CompositeService {
                 case TEMPLATE:
                   try (FSDataInputStream fileInput = fileSystem
                       .open(new Path(key.getSrcFile()))) {
-                    return IOUtils.toString(fileInput, Charset.defaultCharset());
+                    return IOUtils.toString(fileInput, StandardCharsets.UTF_8);
                   }
                 default:
                   return null;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
index f7c0a7b374e..57d76dfeecd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
@@ -48,7 +48,7 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.OutputStream;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -466,8 +466,7 @@ public class ProviderUtils implements YarnServiceConstants {
     content = substituteStrWithTokens(content, tokensForSubstitution);
 
     try (OutputStream output = fs.create(remoteFile)) {
-      org.apache.commons.io.IOUtils
-          .write(content, output, Charset.defaultCharset());
+      org.apache.commons.io.IOUtils.write(content, output, StandardCharsets.UTF_8);
     } catch (IOException e) {
       log.info("Failed to create " + remoteFile);
     }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
index bb6660e0746..c996225c9a7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
@@ -87,7 +87,7 @@ public final class DockerClientConfigHandler {
     if (fs != null) {
       FSDataInputStream fileHandle = fs.open(configFile);
       if (fileHandle != null) {
-        contents = IOUtils.toString(fileHandle, Charset.defaultCharset());
+        contents = IOUtils.toString(fileHandle, StandardCharsets.UTF_8);
       }
     }
     if (contents == null) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/resolver/DefaultSubClusterResolverImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/resolver/DefaultSubClusterResolverImpl.java
index d3c5c269abb..7bb04ffde49 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/resolver/DefaultSubClusterResolverImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/resolver/DefaultSubClusterResolverImpl.java
@@ -19,7 +19,7 @@
 package org.apache.hadoop.yarn.server.federation.resolver;
 
 import java.io.BufferedReader;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.InvalidPathException;
 import java.nio.file.Path;
@@ -107,7 +107,7 @@ public class DefaultSubClusterResolverImpl extends AbstractSubClusterResolver
       }
 
       try {
-        reader = Files.newBufferedReader(file, Charset.defaultCharset());
+        reader = Files.newBufferedReader(file, StandardCharsets.UTF_8);
         String line = null;
         while ((line = reader.readLine()) != null) {
           String[] tokens = line.split(",");
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/LogServlet.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/LogServlet.java
index dfeeefe5712..392adf75708 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/LogServlet.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/LogServlet.java
@@ -45,7 +45,8 @@ import javax.ws.rs.core.GenericEntity;
 import javax.ws.rs.core.Response;
 import javax.ws.rs.core.Response.Status;
 import javax.ws.rs.core.StreamingOutput;
-import java.nio.charset.Charset;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -340,7 +341,7 @@ public class LogServlet extends Configured {
 
   private static StreamingOutput createEmptyStream() {
     return outputStream -> outputStream.write(
-        "".getBytes(Charset.defaultCharset()));
+        "".getBytes(StandardCharsets.UTF_8));
   }
 
   /**
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
index 2cb84514f22..0ef7aad8cd1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
@@ -24,7 +24,7 @@ import java.io.OutputStream;
 import java.io.PrintStream;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -350,8 +350,7 @@ public abstract class ContainerExecutor implements Configurable {
     }
 
     try {
-      return Integer.parseInt(
-          FileUtils.readFileToString(file, Charset.defaultCharset()).trim());
+      return Integer.parseInt(FileUtils.readFileToString(file, StandardCharsets.UTF_8).trim());
     } catch (NumberFormatException e) {
       throw new IOException("Error parsing exit code from pid " + pid, e);
     }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupElasticMemoryController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupElasticMemoryController.java
index dc7f354d1a8..0e7e74d02ad 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupElasticMemoryController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupElasticMemoryController.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.yarn.util.MonotonicClock;
 import java.io.File;
 import java.io.InputStream;
 import java.lang.reflect.Constructor;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
@@ -269,9 +269,8 @@ public class CGroupElasticMemoryController extends Thread {
 
       // Listen to any errors in the background. We do not expect this to
       // be large in size, so it will fit into a string.
-      Future<String> errorListener = executor.submit(
-          () -> IOUtils.toString(process.getErrorStream(),
-              Charset.defaultCharset()));
+      Future<String> errorListener =
+          executor.submit(() -> IOUtils.toString(process.getErrorStream(), StandardCharsets.UTF_8));
 
       // We get Linux event increments (8 bytes) forwarded from the event stream
       // The events cannot be split, so it is safe to read them as a whole
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java
index 59a5a2d29f8..8c8d4b0b3d0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.yarn.server.nodemanager;
 
 import java.io.File;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.Arrays;
@@ -270,16 +270,14 @@ public class TestContainerExecutor {
     try {
       int writtenExitCode = 10;
 
-      FileUtils.writeStringToFile(pidFile, "2992",
-          Charset.defaultCharset(), false);
+      FileUtils.writeStringToFile(pidFile, "2992", StandardCharsets.UTF_8, false);
 
       TimerTask task = new java.util.TimerTask() {
         @Override
         public void run() {
           try {
-            FileUtils.writeStringToFile(exitCodeFile,
-                Integer.toString(writtenExitCode),
-                Charset.defaultCharset(), false);
+            FileUtils.writeStringToFile(exitCodeFile, Integer.toString(writtenExitCode),
+                StandardCharsets.UTF_8, false);
           } catch (IOException ioe) {
             LOG.warn("Could not write pid file");
           }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestCGroupElasticMemoryController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestCGroupElasticMemoryController.java
index f10ec50f3f9..71b392f2d32 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestCGroupElasticMemoryController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestCGroupElasticMemoryController.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.junit.Test;
 
 import java.io.File;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
@@ -97,8 +97,7 @@ public class TestCGroupElasticMemoryController {
         script.getAbsolutePath());
     try {
       FileUtils.writeStringToFile(script,
-          "#!/bin/bash\nprintf oomevent;printf oomevent;\n",
-          Charset.defaultCharset(), false);
+          "#!/bin/bash\nprintf oomevent;printf oomevent;\n", StandardCharsets.UTF_8, false);
       assertTrue("Could not set executable",
           script.setExecutable(true));
 
@@ -138,9 +137,8 @@ public class TestCGroupElasticMemoryController {
     conf.set(YarnConfiguration.NM_ELASTIC_MEMORY_CONTROL_OOM_LISTENER_PATH,
         script.getAbsolutePath());
     try {
-      FileUtils.writeStringToFile(script,
-          "#!/bin/bash\nprintf oomevent;printf oomevent;\n",
-          Charset.defaultCharset(), false);
+      FileUtils.writeStringToFile(script, "#!/bin/bash\nprintf oomevent;printf oomevent;\n",
+          StandardCharsets.UTF_8, false);
       assertTrue("Could not set executable",
           script.setExecutable(true));
 
@@ -181,9 +179,8 @@ public class TestCGroupElasticMemoryController {
         script.getAbsolutePath());
     Runnable handler = mock(Runnable.class);
     try {
-      FileUtils.writeStringToFile(script,
-          "#!/bin/bash\nprintf oomevent;sleep 1000;\n",
-          Charset.defaultCharset(), false);
+      FileUtils.writeStringToFile(script, "#!/bin/bash\nprintf oomevent;sleep 1000;\n",
+          StandardCharsets.UTF_8, false);
       assertTrue("Could not set executable",
           script.setExecutable(true));
 
@@ -223,9 +220,8 @@ public class TestCGroupElasticMemoryController {
         script.getAbsolutePath());
     Runnable handler = mock(Runnable.class);
     try {
-      FileUtils.writeStringToFile(script,
-          "#!/bin/bash\nprintf oomevent;sleep 1000;\n",
-          Charset.defaultCharset(), false);
+      FileUtils.writeStringToFile(script, "#!/bin/bash\nprintf oomevent;sleep 1000;\n",
+          StandardCharsets.UTF_8, false);
       assertTrue("Could not set executable",
           script.setExecutable(true));
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/placement/TestPlacementRuleFS.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/placement/TestPlacementRuleFS.java
index 741304eafbe..116c0ace70d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/placement/TestPlacementRuleFS.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/placement/TestPlacementRuleFS.java
@@ -31,7 +31,7 @@ import org.w3c.dom.Element;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -194,8 +194,7 @@ public class TestPlacementRuleFS {
     Document doc = null;
     try {
       DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
-      doc = builder.parse(IOUtils.toInputStream(str,
-          Charset.defaultCharset()));
+      doc = builder.parse(IOUtils.toInputStream(str, StandardCharsets.UTF_8));
     } catch (Exception ex) {
       fail("Element creation failed, failing test");
     }

