Posted to common-commits@hadoop.apache.org by ji...@apache.org on 2016/08/02 15:21:30 UTC

[33/40] hadoop git commit: HADOOP-13444. Replace org.apache.commons.io.Charsets with java.nio.charset.StandardCharsets. Contributed by Vincent Poon.

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/770b5eb2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/770b5eb2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/770b5eb2

Branch: refs/heads/yarn-native-services
Commit: 770b5eb2db686275df445be9280e76cc3710ffdc
Parents: 34ccaa8
Author: Akira Ajisaka <aa...@apache.org>
Authored: Mon Aug 1 17:35:59 2016 +0900
Committer: Akira Ajisaka <aa...@apache.org>
Committed: Mon Aug 1 17:35:59 2016 +0900
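
The change is mechanical across all 37 files: each reference to org.apache.commons.io.Charsets is replaced by the JDK's java.nio.charset.StandardCharsets (available since Java 7), and the commons-io import is dropped. Both classes expose UTF_8 as a java.nio.charset.Charset constant, so every call site compiles unchanged after the import swap. The sketch below (hypothetical code, not taken from this commit) illustrates the before/after pattern:

    import java.nio.charset.StandardCharsets;

    public class CharsetsMigrationSketch {
      public static void main(String[] args) {
        // Before: "hrpc".getBytes(org.apache.commons.io.Charsets.UTF_8)
        //         -- pulls in commons-io for a constant the JDK already provides.
        // After: the StandardCharsets equivalent, no third-party dependency needed.
        byte[] header = "hrpc".getBytes(StandardCharsets.UTF_8);
        String roundTrip = new String(header, StandardCharsets.UTF_8);
        System.out.println(roundTrip); // prints: hrpc
      }
    }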

----------------------------------------------------------------------
 .../apache/hadoop/crypto/key/KeyProvider.java   |  9 +++--
 .../crypto/key/kms/KMSClientProvider.java       |  4 +-
 .../org/apache/hadoop/fs/shell/Display.java     |  8 ++--
 .../java/org/apache/hadoop/ha/StreamPumper.java |  4 +-
 .../org/apache/hadoop/http/HtmlQuoting.java     | 42 +++++++++++++-------
 .../apache/hadoop/io/DefaultStringifier.java    |  4 +-
 .../java/org/apache/hadoop/io/SequenceFile.java |  4 +-
 .../apache/hadoop/io/compress/BZip2Codec.java   |  6 +--
 .../hadoop/io/file/tfile/TFileDumper.java       |  4 +-
 .../org/apache/hadoop/ipc/RpcConstants.java     |  6 +--
 .../main/java/org/apache/hadoop/ipc/Server.java |  6 +--
 .../hadoop/metrics2/sink/GraphiteSink.java      |  5 ++-
 .../sink/ganglia/AbstractGangliaSink.java       |  4 +-
 .../org/apache/hadoop/net/TableMapping.java     |  6 +--
 .../org/apache/hadoop/security/Credentials.java |  4 +-
 .../hadoop/security/LdapGroupsMapping.java      |  4 +-
 .../apache/hadoop/security/SaslRpcServer.java   |  8 ++--
 .../hadoop/security/ShellBasedIdMapping.java    |  4 +-
 .../hadoop/security/alias/UserProvider.java     |  4 +-
 .../org/apache/hadoop/tracing/TraceAdmin.java   |  2 -
 .../org/apache/hadoop/util/FileBasedIPList.java |  4 +-
 .../org/apache/hadoop/util/HostsFileReader.java |  4 +-
 .../metrics2/impl/TestGangliaMetrics.java       |  4 +-
 .../java/org/apache/hadoop/minikdc/MiniKdc.java |  5 ++-
 .../org/apache/hadoop/mount/MountResponse.java  |  5 ++-
 .../org/apache/hadoop/nfs/nfs3/FileHandle.java  |  4 +-
 .../hadoop/nfs/nfs3/request/CREATE3Request.java |  6 +--
 .../hadoop/nfs/nfs3/request/LINK3Request.java   |  5 ++-
 .../hadoop/nfs/nfs3/request/LOOKUP3Request.java |  8 ++--
 .../hadoop/nfs/nfs3/request/MKDIR3Request.java  |  8 ++--
 .../hadoop/nfs/nfs3/request/MKNOD3Request.java  |  4 +-
 .../hadoop/nfs/nfs3/request/REMOVE3Request.java |  8 ++--
 .../hadoop/nfs/nfs3/request/RENAME3Request.java | 12 +++---
 .../hadoop/nfs/nfs3/request/RMDIR3Request.java  |  8 ++--
 .../nfs/nfs3/request/SYMLINK3Request.java       | 12 +++---
 .../main/java/org/apache/hadoop/oncrpc/XDR.java |  8 ++--
 .../hadoop/oncrpc/security/CredentialsSys.java  |  6 +--
 37 files changed, 132 insertions(+), 117 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
index 9733e45..0845a5f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
@@ -23,6 +23,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.security.NoSuchAlgorithmException;
 import java.util.Collections;
 import java.util.Date;
@@ -32,7 +33,6 @@ import java.util.Map;
 
 import com.google.gson.stream.JsonReader;
 import com.google.gson.stream.JsonWriter;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -209,7 +209,7 @@ public abstract class KeyProvider {
     protected byte[] serialize() throws IOException {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
       JsonWriter writer = new JsonWriter(
-          new OutputStreamWriter(buffer, Charsets.UTF_8));
+          new OutputStreamWriter(buffer, StandardCharsets.UTF_8));
       try {
         writer.beginObject();
         if (cipher != null) {
@@ -252,8 +252,9 @@ public abstract class KeyProvider {
       int versions = 0;
       String description = null;
       Map<String, String> attributes = null;
-      JsonReader reader = new JsonReader(new InputStreamReader
-        (new ByteArrayInputStream(bytes), Charsets.UTF_8));
+      JsonReader reader =
+          new JsonReader(new InputStreamReader(new ByteArrayInputStream(bytes),
+              StandardCharsets.UTF_8));
       try {
         reader.beginObject();
         while (reader.hasNext()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
index 47549f7..701e116 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.crypto.key.kms;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
@@ -65,6 +64,7 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.security.GeneralSecurityException;
 import java.security.NoSuchAlgorithmException;
 import java.security.PrivilegedExceptionAction;
@@ -271,7 +271,7 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
   }
 
   private static void writeJson(Map map, OutputStream os) throws IOException {
-    Writer writer = new OutputStreamWriter(os, Charsets.UTF_8);
+    Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8);
     ObjectMapper jsonMapper = new ObjectMapper();
     jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, map);
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
index f0d7b8d..e30e45c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
@@ -21,6 +21,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.EOFException;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.LinkedList;
 import java.util.zip.GZIPInputStream;
 
@@ -32,7 +33,6 @@ import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.io.JsonEncoder;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -235,10 +235,10 @@ class Display extends FsCommand {
         if (!r.next(key, val)) {
           return -1;
         }
-        byte[] tmp = key.toString().getBytes(Charsets.UTF_8);
+        byte[] tmp = key.toString().getBytes(StandardCharsets.UTF_8);
         outbuf.write(tmp, 0, tmp.length);
         outbuf.write('\t');
-        tmp = val.toString().getBytes(Charsets.UTF_8);
+        tmp = val.toString().getBytes(StandardCharsets.UTF_8);
         outbuf.write(tmp, 0, tmp.length);
         outbuf.write('\n');
         inbuf.reset(outbuf.getData(), outbuf.getLength());
@@ -301,7 +301,7 @@ class Display extends FsCommand {
       if (!fileReader.hasNext()) {
         // Write a new line after the last Avro record.
         output.write(System.getProperty("line.separator")
-                         .getBytes(Charsets.UTF_8));
+                         .getBytes(StandardCharsets.UTF_8));
         output.flush();
       }
       pos = 0;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/StreamPumper.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/StreamPumper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/StreamPumper.java
index 00c6401..8018f43 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/StreamPumper.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/StreamPumper.java
@@ -21,8 +21,8 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 
 /**
@@ -78,7 +78,7 @@ class StreamPumper {
 
   protected void pump() throws IOException {
     InputStreamReader inputStreamReader = new InputStreamReader(
-        stream, Charsets.UTF_8);
+        stream, StandardCharsets.UTF_8);
     BufferedReader br = new BufferedReader(inputStreamReader);
     String line = null;
     while ((line = br.readLine()) != null) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
index 57acebd..51db21c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
@@ -17,21 +17,25 @@
  */
 package org.apache.hadoop.http;
 
-import org.apache.commons.io.Charsets;
-
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 
 /**
  * This class is responsible for quoting HTML characters.
  */
 public class HtmlQuoting {
-  private static final byte[] ampBytes = "&amp;".getBytes(Charsets.UTF_8);
-  private static final byte[] aposBytes = "&apos;".getBytes(Charsets.UTF_8);
-  private static final byte[] gtBytes = "&gt;".getBytes(Charsets.UTF_8);
-  private static final byte[] ltBytes = "&lt;".getBytes(Charsets.UTF_8);
-  private static final byte[] quotBytes = "&quot;".getBytes(Charsets.UTF_8);
+  private static final byte[] AMP_BYTES =
+      "&amp;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] APOS_BYTES =
+      "&apos;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] GT_BYTES =
+      "&gt;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] LT_BYTES =
+      "&lt;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] QUOT_BYTES =
+      "&quot;".getBytes(StandardCharsets.UTF_8);
 
   /**
    * Does the given string need to be quoted?
@@ -65,7 +69,7 @@ public class HtmlQuoting {
     if (str == null) {
       return false;
     }
-    byte[] bytes = str.getBytes(Charsets.UTF_8);
+    byte[] bytes = str.getBytes(StandardCharsets.UTF_8);
     return needsQuoting(bytes, 0 , bytes.length);
   }
 
@@ -81,11 +85,21 @@ public class HtmlQuoting {
                                     int off, int len) throws IOException {
     for(int i=off; i < off+len; i++) {
       switch (buffer[i]) {
-      case '&': output.write(ampBytes); break;
-      case '<': output.write(ltBytes); break;
-      case '>': output.write(gtBytes); break;
-      case '\'': output.write(aposBytes); break;
-      case '"': output.write(quotBytes); break;
+      case '&':
+        output.write(AMP_BYTES);
+        break;
+      case '<':
+        output.write(LT_BYTES);
+        break;
+      case '>':
+        output.write(GT_BYTES);
+        break;
+      case '\'':
+        output.write(APOS_BYTES);
+        break;
+      case '"':
+        output.write(QUOT_BYTES);
+        break;
       default: output.write(buffer, i, 1);
       }
     }
@@ -100,7 +114,7 @@ public class HtmlQuoting {
     if (item == null) {
       return null;
     }
-    byte[] bytes = item.getBytes(Charsets.UTF_8);
+    byte[] bytes = item.getBytes(StandardCharsets.UTF_8);
     if (needsQuoting(bytes, 0, bytes.length)) {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
       try {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
index 3ba577f..7453996 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
@@ -19,11 +19,11 @@
 package org.apache.hadoop.io;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.charset.UnsupportedCharsetException;
 import java.util.ArrayList;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -91,7 +91,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
     serializer.serialize(obj);
     byte[] buf = new byte[outBuf.getLength()];
     System.arraycopy(outBuf.getData(), 0, buf, 0, buf.length);
-    return new String(Base64.encodeBase64(buf), Charsets.UTF_8);
+    return new String(Base64.encodeBase64(buf), StandardCharsets.UTF_8);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
index 0f3f317..d8bec48 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
@@ -19,11 +19,11 @@
 package org.apache.hadoop.io;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.*;
 import java.rmi.server.UID;
 import java.security.MessageDigest;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.*;
 import org.apache.hadoop.util.Options;
 import org.apache.hadoop.fs.*;
@@ -853,7 +853,7 @@ public class SequenceFile {
       try {                                       
         MessageDigest digester = MessageDigest.getInstance("MD5");
         long time = Time.now();
-        digester.update((new UID()+"@"+time).getBytes(Charsets.UTF_8));
+        digester.update((new UID()+"@"+time).getBytes(StandardCharsets.UTF_8));
         sync = digester.digest();
       } catch (Exception e) {
         throw new RuntimeException(e);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
index 49dd9c1..bf78e0c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
@@ -22,8 +22,8 @@ import java.io.BufferedInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 
@@ -287,7 +287,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
         // The compressed bzip2 stream should start with the
         // identifying characters BZ. Caller of CBZip2OutputStream
         // i.e. this class must write these characters.
-        out.write(HEADER.getBytes(Charsets.UTF_8));
+        out.write(HEADER.getBytes(StandardCharsets.UTF_8));
       }
     }
 
@@ -421,7 +421,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
         byte[] headerBytes = new byte[HEADER_LEN];
         int actualRead = bufferedIn.read(headerBytes, 0, HEADER_LEN);
         if (actualRead != -1) {
-          String header = new String(headerBytes, Charsets.UTF_8);
+          String header = new String(headerBytes, StandardCharsets.UTF_8);
           if (header.compareTo(HEADER) != 0) {
             bufferedIn.reset();
           } else {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
index aabdf57..84b92ec 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
@@ -18,13 +18,13 @@ package org.apache.hadoop.io.file.tfile;
 
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -234,7 +234,7 @@ class TFileDumper {
               out.printf("%X", b);
             }
           } else {
-            out.print(new String(key, 0, sampleLen, Charsets.UTF_8));
+            out.print(new String(key, 0, sampleLen, StandardCharsets.UTF_8));
           }
           if (sampleLen < key.length) {
             out.print("...");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
index d5e795b..d38474a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.ipc;
 
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 
 @InterfaceAudience.Private
@@ -54,8 +54,8 @@ public class RpcConstants {
   /**
    * The first four bytes of Hadoop RPC connections
    */
-  public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes
-      (Charsets.UTF_8));
+  public static final ByteBuffer HEADER =
+      ByteBuffer.wrap("hrpc".getBytes(StandardCharsets.UTF_8));
   public static final int HEADER_LEN_AFTER_HRPC_PART = 3; // 3 bytes that follow
   
   // 1 : Introduce ping and server does not throw away RPCs

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index caa534c..405549a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -47,6 +47,7 @@ import java.nio.channels.Selector;
 import java.nio.channels.ServerSocketChannel;
 import java.nio.channels.SocketChannel;
 import java.nio.channels.WritableByteChannel;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -69,7 +70,6 @@ import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -223,7 +223,7 @@ public abstract class Server {
    * and send back a nicer response.
    */
   private static final ByteBuffer HTTP_GET_BYTES = ByteBuffer.wrap(
-      "GET ".getBytes(Charsets.UTF_8));
+      "GET ".getBytes(StandardCharsets.UTF_8));
   
   /**
    * An HTTP response to send back if we detect an HTTP request to our IPC
@@ -1957,7 +1957,7 @@ public abstract class Server {
     private void setupHttpRequestOnIpcPortResponse() throws IOException {
       Call fakeCall = new Call(0, RpcConstants.INVALID_RETRY_COUNT, null, this);
       fakeCall.setResponse(ByteBuffer.wrap(
-          RECEIVED_HTTP_REQ_RESPONSE.getBytes(Charsets.UTF_8)));
+          RECEIVED_HTTP_REQ_RESPONSE.getBytes(StandardCharsets.UTF_8)));
       fakeCall.sendResponse();
     }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
index e46a654..a61fa5b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.metrics2.sink;
 
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -35,6 +34,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.net.Socket;
+import java.nio.charset.StandardCharsets;
 
 /**
  * A metrics sink that writes to a Graphite server
@@ -150,7 +150,8 @@ public class GraphiteSink implements MetricsSink, Closeable {
         try {
           // Open a connection to Graphite server.
           socket = new Socket(serverHost, serverPort);
-          writer = new OutputStreamWriter(socket.getOutputStream(), Charsets.UTF_8);
+        writer = new OutputStreamWriter(socket.getOutputStream(),
+                StandardCharsets.UTF_8);
         } catch (Exception e) {
           connectionFailures++;
           if (tooManyConnectionFailures()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
index c9df0ff..887cfff 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
@@ -20,12 +20,12 @@ package org.apache.hadoop.metrics2.sink.ganglia;
 
 import java.io.IOException;
 import java.net.*;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.MetricsSink;
@@ -235,7 +235,7 @@ public abstract class AbstractGangliaSink implements MetricsSink {
    * @param s the string to be written to buffer at offset location
    */
   protected void xdr_string(String s) {
-    byte[] bytes = s.getBytes(Charsets.UTF_8);
+    byte[] bytes = s.getBytes(StandardCharsets.UTF_8);
     int len = bytes.length;
     xdr_int(len);
     System.arraycopy(bytes, 0, buffer, offset, len);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java
index 59c0ca9..362cf07 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java
@@ -21,15 +21,13 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.NET_TOPOLOGY_TA
 
 import java.io.BufferedReader;
 import java.io.FileInputStream;
-import java.io.FileReader;
-import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -102,7 +100,7 @@ public class TableMapping extends CachedDNSToSwitchMapping {
 
       try (BufferedReader reader =
                new BufferedReader(new InputStreamReader(
-                   new FileInputStream(filename), Charsets.UTF_8))) {
+                   new FileInputStream(filename), StandardCharsets.UTF_8))) {
         String line = reader.readLine();
         while (line != null) {
           line = line.trim();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
index c62a49c..5a8e81f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
@@ -29,13 +29,13 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -242,7 +242,7 @@ public class Credentials implements Writable {
   }
 
   private static final byte[] TOKEN_STORAGE_MAGIC =
-      "HDTS".getBytes(Charsets.UTF_8);
+      "HDTS".getBytes(StandardCharsets.UTF_8);
   private static final byte TOKEN_STORAGE_VERSION = 1;
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
index 5a0b1d9..4b941ef 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
@@ -21,6 +21,7 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.Reader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Hashtable;
@@ -40,7 +41,6 @@ import javax.naming.directory.SearchResult;
 import javax.naming.ldap.LdapName;
 import javax.naming.ldap.Rdn;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -642,7 +642,7 @@ public class LdapGroupsMapping
 
     StringBuilder password = new StringBuilder();
     try (Reader reader = new InputStreamReader(
-        new FileInputStream(pwFile), Charsets.UTF_8)) {
+        new FileInputStream(pwFile), StandardCharsets.UTF_8)) {
       int c = reader.read();
       while (c > -1) {
         password.append((char)c);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
index 50acc5c..377a0f1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
@@ -23,6 +23,7 @@ import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.security.Security;
 import java.util.ArrayList;
@@ -44,7 +45,6 @@ import javax.security.sasl.SaslServer;
 import javax.security.sasl.SaslServerFactory;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -185,11 +185,11 @@ public class SaslRpcServer {
   }
   
   static String encodeIdentifier(byte[] identifier) {
-    return new String(Base64.encodeBase64(identifier), Charsets.UTF_8);
+    return new String(Base64.encodeBase64(identifier), StandardCharsets.UTF_8);
   }
 
   static byte[] decodeIdentifier(String identifier) {
-    return Base64.decodeBase64(identifier.getBytes(Charsets.UTF_8));
+    return Base64.decodeBase64(identifier.getBytes(StandardCharsets.UTF_8));
   }
 
   public static <T extends TokenIdentifier> T getIdentifier(String id,
@@ -208,7 +208,7 @@ public class SaslRpcServer {
 
   static char[] encodePassword(byte[] password) {
     return new String(Base64.encodeBase64(password),
-                      Charsets.UTF_8).toCharArray();
+                      StandardCharsets.UTF_8).toCharArray();
   }
 
   /** Splitting fully qualified Kerberos name into parts */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
index e45ac8c..efc1fd6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
@@ -23,12 +23,12 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -583,7 +583,7 @@ public class ShellBasedIdMapping implements IdMappingServiceProvider {
     Map<Integer, Integer> gidMapping = new HashMap<Integer, Integer>();
     
     BufferedReader in = new BufferedReader(new InputStreamReader(
-        new FileInputStream(staticMapFile), Charsets.UTF_8));
+        new FileInputStream(staticMapFile), StandardCharsets.UTF_8));
     
     try {
       String line = null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
index 127ccf0..0c960d8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.security.alias;
 
 import java.io.IOException;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
@@ -58,7 +58,7 @@ public class UserProvider extends CredentialProvider {
       return null;
     }
     return new CredentialEntry(
-        alias, new String(bytes, Charsets.UTF_8).toCharArray());
+        alias, new String(bytes, StandardCharsets.UTF_8).toCharArray());
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java
index 4cf1ead..038435c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java
@@ -21,11 +21,9 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.InetSocketAddress;
-import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
index b0c12be..6ee1212 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
@@ -23,12 +23,12 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.Reader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -89,7 +89,7 @@ public class FileBasedIPList implements IPList {
         if (file.exists()) {
           try (
               Reader fileReader = new InputStreamReader(
-                  new FileInputStream(file), Charsets.UTF_8);
+                  new FileInputStream(file), StandardCharsets.UTF_8);
               BufferedReader bufferedReader = new BufferedReader(fileReader)) {
             List<String> lines = new ArrayList<String>();
             String line = null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
index c5d6b86..1cba426 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
@@ -19,13 +19,13 @@
 package org.apache.hadoop.util;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.Set;
 import java.util.HashSet;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -85,7 +85,7 @@ public class HostsFileReader {
     BufferedReader reader = null;
     try {
       reader = new BufferedReader(
-          new InputStreamReader(fileInputStream, Charsets.UTF_8));
+          new InputStreamReader(fileInputStream, StandardCharsets.UTF_8));
       String line;
       while ((line = reader.readLine()) != null) {
         String[] nodes = line.split("[ \t\n\f\r]+");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
index c19d238..daf64bd 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
@@ -25,12 +25,12 @@ import java.io.IOException;
 import java.net.DatagramPacket;
 import java.net.DatagramSocket;
 import java.net.SocketException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.AbstractMetric;
@@ -148,7 +148,7 @@ public class TestGangliaMetrics {
   private void checkMetrics(List<byte[]> bytearrlist, int expectedCount) {
     boolean[] foundMetrics = new boolean[expectedMetrics.length];
     for (byte[] bytes : bytearrlist) {
-      String binaryStr = new String(bytes, Charsets.UTF_8);
+      String binaryStr = new String(bytes, StandardCharsets.UTF_8);
       for (int index = 0; index < expectedMetrics.length; index++) {
         if (binaryStr.indexOf(expectedMetrics[index]) >= 0) {
           foundMetrics[index] = true;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java b/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
index 281b3cc..fe194c0 100644
--- a/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
+++ b/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
@@ -17,7 +17,6 @@
  */
 
 package org.apache.hadoop.minikdc;
-import org.apache.commons.io.Charsets;
 import org.apache.kerby.kerberos.kerb.KrbException;
 import org.apache.kerby.kerberos.kerb.server.KdcConfigKey;
 import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
@@ -31,6 +30,7 @@ import java.io.FileInputStream;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Locale;
@@ -95,7 +95,8 @@ public class MiniKdc {
     Properties userConf = new Properties();
     InputStreamReader r = null;
     try {
-      r = new InputStreamReader(new FileInputStream(file), Charsets.UTF_8);
+      r = new InputStreamReader(new FileInputStream(file),
+          StandardCharsets.UTF_8);
       userConf.load(r);
     } finally {
       if (r != null) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java
index 889d45a..a073f87 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java
@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.mount;
 
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.NfsExports;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.XDR;
@@ -77,7 +77,8 @@ public class MountResponse {
       if (hostGroups.length > 0) {
         for (int j = 0; j < hostGroups.length; j++) {
           xdr.writeBoolean(true); // Value follows - yes
-          xdr.writeVariableOpaque(hostGroups[j].getBytes(Charsets.UTF_8));
+          xdr.writeVariableOpaque(
+              hostGroups[j].getBytes(StandardCharsets.UTF_8));
         }
       }
       xdr.writeBoolean(false); // Value follows - no more group

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
index 415b459..f58c8b3 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
@@ -18,11 +18,11 @@
 package org.apache.hadoop.nfs.nfs3;
 
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Arrays;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.XDR;
@@ -73,7 +73,7 @@ public class FileHandle {
       return;
     }
 
-    byte[] in = s.getBytes(Charsets.UTF_8);
+    byte[] in = s.getBytes(StandardCharsets.UTF_8);
     digest.update(in);
 
     byte[] digestbytes = digest.digest();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java
index e75ce15..39c368d 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
 import org.apache.hadoop.oncrpc.XDR;
@@ -79,9 +79,9 @@ public class CREATE3Request extends RequestWithHandle {
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
     xdr.writeInt(name.length());
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8), name.length());
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8), name.length());
     xdr.writeInt(mode);
     objAttr.serialize(xdr);
   }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java
index 1dcb85d..6c7b76a 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -57,6 +57,7 @@ public class LINK3Request extends RequestWithHandle {
     handle.serialize(xdr);
     fromDirHandle.serialize(xdr);
     xdr.writeInt(fromName.length());
-    xdr.writeFixedOpaque(fromName.getBytes(Charsets.UTF_8), fromName.length());
+    xdr.writeFixedOpaque(fromName.getBytes(StandardCharsets.UTF_8),
+        fromName.length());
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java
index 0435483..4d31a82 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -54,7 +54,7 @@ public class LOOKUP3Request extends RequestWithHandle {
   @VisibleForTesting
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java
index bba26ee..93e7c7a 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -55,8 +55,8 @@ public class MKDIR3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
     objAttr.serialize(xdr);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKNOD3Request.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKNOD3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKNOD3Request.java
index 0659dd1..aa2ec8c 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKNOD3Request.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKNOD3Request.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.NfsFileType;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes.Specdata3;
@@ -80,7 +80,7 @@ public class MKNOD3Request extends RequestWithHandle {
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
     xdr.writeInt(name.length());
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8), name.length());
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8), name.length());
     objAttr.serialize(xdr);
     if (spec != null) {
       xdr.writeInt(spec.getSpecdata1());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java
index 9ad156d..c4ca6d2 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -47,7 +47,7 @@ public class REMOVE3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java
index c54a8e2..561b79e 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -67,10 +67,10 @@ public class RENAME3Request extends NFS3Request {
   @Override
   public void serialize(XDR xdr) {
     fromDirHandle.serialize(xdr);
-    xdr.writeInt(fromName.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(fromName.getBytes(Charsets.UTF_8));
+    xdr.writeInt(fromName.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(fromName.getBytes(StandardCharsets.UTF_8));
     toDirHandle.serialize(xdr);
-    xdr.writeInt(toName.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(toName.getBytes(Charsets.UTF_8));
+    xdr.writeInt(toName.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(toName.getBytes(StandardCharsets.UTF_8));
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java
index 6ae0de8..6146245 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -47,7 +47,7 @@ public class RMDIR3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java
index 59188e2..86d2b9f 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -63,10 +63,10 @@ public class SYMLINK3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
     symAttr.serialize(xdr);
-    xdr.writeInt(symData.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(symData.getBytes(Charsets.UTF_8));
+    xdr.writeInt(symData.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(symData.getBytes(StandardCharsets.UTF_8));
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java
index 474559e..b135388 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.oncrpc;
 
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.buffer.ChannelBuffers;
 
@@ -166,11 +166,11 @@ public final class XDR {
   }
 
   public String readString() {
-    return new String(readVariableOpaque(), Charsets.UTF_8);
+    return new String(readVariableOpaque(), StandardCharsets.UTF_8);
   }
 
   public void writeString(String s) {
-    writeVariableOpaque(s.getBytes(Charsets.UTF_8));
+    writeVariableOpaque(s.getBytes(StandardCharsets.UTF_8));
   }
 
   private void writePadding() {
@@ -270,4 +270,4 @@ public final class XDR {
 
     return b;
   }
-}
\ No newline at end of file
+}
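
readString() and writeString() above delegate to the variable-length opaque routines, so a string goes on the wire as a 4-byte big-endian length word, the UTF-8 payload, and zero padding up to the next 4-byte boundary (the XDR encoding of RFC 4506). A standalone sketch of that framing using only java.nio, with a hypothetical class name and no dependency on Hadoop's XDR class:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class XdrStringFraming {
  // 4-byte big-endian length, UTF-8 payload, zero pad to a 4-byte boundary.
  static ByteBuffer encode(String s) {
    byte[] bytes = s.getBytes(StandardCharsets.UTF_8);
    int padding = (4 - bytes.length % 4) % 4;
    ByteBuffer buf = ByteBuffer.allocate(4 + bytes.length + padding);
    buf.putInt(bytes.length);                // ByteBuffer is big-endian by default
    buf.put(bytes);
    buf.position(buf.position() + padding);  // pad bytes stay zero
    buf.flip();
    return buf;
  }

  public static void main(String[] args) {
    System.out.println(encode("abcde").remaining());  // 12 = 4 + 5 + 3 pad
  }
}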

http://git-wip-us.apache.org/repos/asf/hadoop/blob/770b5eb2/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java
index 4d39de3..19ba320 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java
@@ -19,9 +19,9 @@ package org.apache.hadoop.oncrpc.security;
 
 import java.net.InetAddress;
 import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.oncrpc.XDR;
 
 /** Credential used by AUTH_SYS */
@@ -106,11 +106,11 @@ public class CredentialsSys extends Credentials {
   public void write(XDR xdr) {
     int padding = 0;
     // Ensure there are padding bytes if hostname is not a multiple of 4.
-    padding = 4 - (mHostName.getBytes(Charsets.UTF_8).length % 4);
+    padding = 4 - (mHostName.getBytes(StandardCharsets.UTF_8).length % 4);
     // padding bytes is zero if hostname is already a multiple of 4.
     padding = padding % 4;
     // mStamp + mHostName.length + mHostName + mUID + mGID + mAuxGIDs.count
-    mCredentialsLength = 20 + mHostName.getBytes(Charsets.UTF_8).length;
+    mCredentialsLength = 20 + mHostName.getBytes(StandardCharsets.UTF_8).length;
     mCredentialsLength = mCredentialsLength + padding;
     // mAuxGIDs
     if (mAuxGIDs != null && mAuxGIDs.length > 0) {
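
For reference, the alignment arithmetic above is the usual round-up-to-4 idiom, (4 - len % 4) % 4, where the trailing % 4 turns a would-be padding of 4 into 0 for already-aligned hostnames. The constant 20 matches the five fixed 4-byte fields named in the comment (stamp, hostname length word, UID, GID, aux-GID count). A tiny sketch, with hypothetical hostnames:

// Worked example of the padding computation in write(XDR) above.
static int xdrPadding(int byteLen) {
  return (4 - byteLen % 4) % 4;   // 0 when byteLen is already a multiple of 4
}
// xdrPadding(7) == 1, e.g. a 7-byte hostname such as "hadoop1"
// xdrPadding(8) == 0, e.g. an 8-byte hostname such as "nfs-host"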

