Posted to commits@hive.apache.org by bs...@apache.org on 2019/01/23 18:47:08 UTC

hive git commit: HIVE-21148: Remove Use StandardCharsets Where Possible (BELUGA BEHR via Slim Bouguerra)

Repository: hive
Updated Branches:
  refs/heads/master 268a6e5af -> a7e704c67


HIVE-21148: Remove Use StandardCharsets Where Possible (BELUGA BEHR via Slim Bouguerra)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a7e704c6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a7e704c6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a7e704c6

Branch: refs/heads/master
Commit: a7e704c679a00db68db9b9f921d133d79a32cfcc
Parents: 268a6e5
Author: BELUGA BEHR <da...@gmail.com>
Authored: Wed Jan 23 10:46:05 2019 -0800
Committer: Slim Bouguerra <bs...@apache.org>
Committed: Wed Jan 23 10:46:05 2019 -0800

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/cli/CliDriver.java   |  10 +-
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   6 +-
 .../org/apache/hive/jdbc/HiveBaseResultSet.java |  11 +-
 .../org/apache/hadoop/hive/ql/debug/Utils.java  |  26 ++--
 .../hive/ql/exec/SerializationUtilities.java    |  33 ++---
 ...AbstractFilterStringColLikeStringScalar.java |  25 +---
 .../serde/primitive/ParquetStringInspector.java |   9 +-
 .../vector/ParquetDataColumnReaderFactory.java  |   9 +-
 .../formatting/JsonMetaDataFormatter.java       |  25 ++--
 .../physical/CommonJoinTaskDispatcher.java      |   3 +-
 .../physical/SortMergeJoinTaskDispatcher.java   |   3 +-
 .../expressions/TestVectorMathFunctions.java    |  16 +--
 .../TestVectorStringExpressions.java            | 105 +++++++---------
 .../apache/hadoop/hive/ql/io/TestRCFile.java    | 124 +++++++++----------
 .../ql/io/parquet/TestDataWritableWriter.java   |   6 +-
 .../ql/io/sarg/TestConvertAstToSearchArg.java   |   9 +-
 .../hadoop/hive/serde2/lazy/LazyTimestamp.java  |  23 ++--
 .../hive/serde2/lazy/LazyTimestampLocalTZ.java  |  19 +--
 .../serde2/thrift/TBinarySortableProtocol.java  |  16 +--
 .../cli/operation/HiveCommandOperation.java     |  18 +--
 .../service/cli/operation/SQLOperation.java     |  19 ++-
 .../service/cli/thrift/ThriftHttpServlet.java   |   7 +-
 22 files changed, 215 insertions(+), 307 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
----------------------------------------------------------------------
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index 325a82d..8e6b01b 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -27,6 +27,7 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.PrintStream;
 import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -723,9 +724,12 @@ public class CliDriver {
     CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
     ss.in = System.in;
     try {
-      ss.out = new SessionStream(System.out, true, "UTF-8");
-      ss.info = new SessionStream(System.err, true, "UTF-8");
-      ss.err = new CachingPrintStream(System.err, true, "UTF-8");
+      ss.out =
+          new SessionStream(System.out, true, StandardCharsets.UTF_8.name());
+      ss.info =
+          new SessionStream(System.err, true, StandardCharsets.UTF_8.name());
+      ss.err = new CachingPrintStream(System.err, true,
+          StandardCharsets.UTF_8.name());
     } catch (UnsupportedEncodingException e) {
       return 3;
     }
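
Why the .name() call survives here: SessionStream follows the PrintStream
constructor shape, taking the encoding as a String name rather than a
Charset, and that constructor declares UnsupportedEncodingException, so the
catch block stays. A minimal sketch of the same pattern against the plain
JDK PrintStream (class name is illustrative):

    import java.io.PrintStream;
    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    public class StreamSetup {
      public static void main(String[] args) throws UnsupportedEncodingException {
        // The String-encoding constructor still declares the checked
        // exception; StandardCharsets.UTF_8.name() just replaces the
        // "UTF-8" literal with a typo-proof constant.
        PrintStream out = new PrintStream(System.out, true, StandardCharsets.UTF_8.name());
        out.println("configured for UTF-8");
      }
    }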

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 0391f21..b3a4754 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -55,6 +55,7 @@ import java.net.URI;
 import java.net.URL;
 import java.net.URLDecoder;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.time.ZoneId;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -112,11 +113,10 @@ public class HiveConf extends Configuration {
   }
 
   public static class URLEncoderDecoder implements EncoderDecoder<String, String> {
-    private static final String UTF_8 = "UTF-8";
     @Override
     public String encode(String key) {
       try {
-        return URLEncoder.encode(key, UTF_8);
+        return URLEncoder.encode(key, StandardCharsets.UTF_8.name());
       } catch (UnsupportedEncodingException e) {
         return key;
       }
@@ -125,7 +125,7 @@ public class HiveConf extends Configuration {
     @Override
     public String decode(String value) {
       try {
-        return URLDecoder.decode(value, UTF_8);
+        return URLDecoder.decode(value, StandardCharsets.UTF_8.name());
       } catch (UnsupportedEncodingException e) {
         return value;
       }
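
URLEncoder.encode(String, String) and URLDecoder.decode(String, String)
likewise take an encoding name and declare UnsupportedEncodingException,
which is why the try/catch remains even though UTF-8 is guaranteed on every
JVM. A sketch of the checked-exception-free alternative, assuming a Java
10+ runtime (Hive still targeted Java 8 at the time, so the name-based
overloads above are the ones the patch can use):

    import java.net.URLDecoder;
    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;

    public class UrlCodec {
      public static void main(String[] args) {
        // The Charset overloads (Java 10+) cannot throw
        // UnsupportedEncodingException, so no catch is needed.
        String encoded = URLEncoder.encode("a key=1", StandardCharsets.UTF_8);
        String decoded = URLDecoder.decode(encoded, StandardCharsets.UTF_8);
        System.out.println(encoded + " -> " + decoded); // a+key%3D1 -> a key=1
      }
    }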

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
----------------------------------------------------------------------
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
index 8f06fd6..dfaa40f 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
@@ -21,10 +21,10 @@ package org.apache.hive.jdbc;
 import java.io.ByteArrayInputStream;
 import java.io.InputStream;
 import java.io.Reader;
-import java.io.UnsupportedEncodingException;
 import java.math.BigDecimal;
 import java.math.MathContext;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.sql.Array;
 import java.sql.Blob;
 import java.sql.Clob;
@@ -162,14 +162,7 @@ public abstract class HiveBaseResultSet implements ResultSet {
       return is;
     } else if (obj instanceof String) {
       String str = (String)obj;
-      InputStream is = null;
-      try {
-        is = new ByteArrayInputStream(str.getBytes("UTF-8"));
-      } catch (UnsupportedEncodingException e) {
-        throw new SQLException("Illegal conversion to binary stream from column " +
-            columnIndex + " - Unsupported encoding exception");
-      }
-      return is;
+      return new ByteArrayInputStream(str.getBytes(StandardCharsets.UTF_8));
     }
     throw new SQLException("Illegal conversion to binary stream from column " + columnIndex);
   }
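
This hunk shows the main payoff of the change: String.getBytes(Charset),
unlike getBytes(String), declares no checked exception, so the try/catch
and its unreachable SQLException path can be deleted outright. A
self-contained sketch of the resulting shape:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    public class ToStream {
      // getBytes(Charset) cannot fail with UnsupportedEncodingException,
      // so the conversion needs no error handling at all.
      static InputStream asBinaryStream(String str) {
        return new ByteArrayInputStream(str.getBytes(StandardCharsets.UTF_8));
      }

      public static void main(String[] args) throws Exception {
        System.out.println(asBinaryStream("hello").available() + " bytes"); // 5 bytes
      }
    }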

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/ql/src/java/org/apache/hadoop/hive/ql/debug/Utils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/debug/Utils.java b/ql/src/java/org/apache/hadoop/hive/ql/debug/Utils.java
index b0ba33f..4bdcea5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/debug/Utils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/debug/Utils.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hive.ql.debug;
 
 import java.io.File;
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.lang.management.ManagementFactory;
 import java.lang.reflect.Method;
+import java.nio.charset.StandardCharsets;
 
 import javax.management.MBeanServer;
 
@@ -118,21 +118,17 @@ public class Utils {
    */
   public static String toStringBinary(final byte [] b, int off, int len) {
     StringBuilder result = new StringBuilder();
-    try {
-      String first = new String(b, off, len, "ISO-8859-1");
-      for (int i = 0; i < first.length() ; ++i ) {
-        int ch = first.charAt(i) & 0xFF;
-        if ( (ch >= '0' && ch <= '9')
-            || (ch >= 'A' && ch <= 'Z')
-            || (ch >= 'a' && ch <= 'z')
-            || " `~!@#$%^&*()-_=+[]{}\\|;:'\",.<>/?".indexOf(ch) >= 0 ) {
-          result.append(first.charAt(i));
-        } else {
-          result.append(String.format("\\x%02X", ch));
-        }
+    String first = new String(b, off, len, StandardCharsets.ISO_8859_1);
+    for (int i = 0; i < first.length() ; ++i ) {
+      int ch = first.charAt(i) & 0xFF;
+      if ( (ch >= '0' && ch <= '9')
+          || (ch >= 'A' && ch <= 'Z')
+          || (ch >= 'a' && ch <= 'z')
+          || " `~!@#$%^&*()-_=+[]{}\\|;:'\",.<>/?".indexOf(ch) >= 0 ) {
+        result.append(first.charAt(i));
+      } else {
+        result.append(String.format("\\x%02X", ch));
       }
-    } catch (UnsupportedEncodingException e) {
-      throw new RuntimeException("ISO-8859-1 not supported?", e);
     }
     return result.toString();
   }
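
The ISO-8859-1 decode works here because that charset maps every byte to
exactly one char, so charAt(i) & 0xFF recovers the raw byte value; the old
catch clause ("ISO-8859-1 not supported?") guarded a condition the Java
platform spec rules out, since ISO-8859-1 is a required charset. A compact
sketch of the same escaping idea:

    import java.nio.charset.StandardCharsets;

    public class BinaryToString {
      public static void main(String[] args) {
        byte[] b = {'a', 0x00, (byte) 0xFF, 'Z'};
        // ISO-8859-1 decodes byte N to char N, one-to-one.
        String first = new String(b, StandardCharsets.ISO_8859_1);
        StringBuilder result = new StringBuilder();
        for (int i = 0; i < first.length(); i++) {
          int ch = first.charAt(i) & 0xFF;
          if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z')
              || (ch >= 'a' && ch <= 'z')) {
            result.append(first.charAt(i));
          } else {
            result.append(String.format("\\x%02X", ch)); // escape the rest
          }
        }
        System.out.println(result); // a\x00\xFFZ
      }
    }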

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
index 28550d8..e4d33e8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
@@ -22,10 +22,10 @@ import java.io.ByteArrayOutputStream;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.Serializable;
-import java.io.UnsupportedEncodingException;
 import java.lang.reflect.Array;
 import java.lang.reflect.Field;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.sql.Timestamp;
 import java.time.ZoneId;
 import java.util.ArrayList;
@@ -778,20 +778,12 @@ public class SerializationUtilities {
   }
 
   public static String serializeExpression(ExprNodeGenericFuncDesc expr) {
-    try {
-      return new String(Base64.encodeBase64(serializeExpressionToKryo(expr)), "UTF-8");
-    } catch (UnsupportedEncodingException ex) {
-      throw new RuntimeException("UTF-8 support required", ex);
-    }
+    return new String(Base64.encodeBase64(serializeExpressionToKryo(expr)),
+        StandardCharsets.UTF_8);
   }
 
   public static ExprNodeGenericFuncDesc deserializeExpression(String s) {
-    byte[] bytes;
-    try {
-      bytes = Base64.decodeBase64(s.getBytes("UTF-8"));
-    } catch (UnsupportedEncodingException ex) {
-      throw new RuntimeException("UTF-8 support required", ex);
-    }
+    byte[] bytes = Base64.decodeBase64(s.getBytes(StandardCharsets.UTF_8));
     return deserializeExpressionFromKryo(bytes);
   }
 
@@ -822,19 +814,14 @@ public class SerializationUtilities {
   }
 
   public static String serializeObject(Serializable expr) {
-    try {
-      return new String(Base64.encodeBase64(serializeObjectToKryo(expr)), "UTF-8");
-    } catch (UnsupportedEncodingException ex) {
-      throw new RuntimeException("UTF-8 support required", ex);
-    }
+    return new String(Base64.encodeBase64(serializeObjectToKryo(expr)),
+        StandardCharsets.UTF_8);
   }
 
-  public static <T extends Serializable> T deserializeObject(String s, Class<T> clazz) {
-    try {
-      return deserializeObjectFromKryo(Base64.decodeBase64(s.getBytes("UTF-8")), clazz);
-    } catch (UnsupportedEncodingException ex) {
-      throw new RuntimeException("UTF-8 support required", ex);
-    }
+  public static <T extends Serializable> T deserializeObject(String s,
+      Class<T> clazz) {
+    return deserializeObjectFromKryo(
+        Base64.decodeBase64(s.getBytes(StandardCharsets.UTF_8)), clazz);
   }
 
 }
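
A round-trip sketch of the serialize/deserialize pair above; it substitutes
the JDK's java.util.Base64 for the commons-codec Base64 that the patch
keeps, purely to stay self-contained:

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;
    import java.util.Base64;

    public class KryoBase64 {
      // Stand-ins for serializeExpressionToKryo / deserializeExpressionFromKryo:
      // the charset only matters for the Base64 text, which is pure ASCII,
      // so UTF-8 is a safe, explicit choice.
      static String serialize(byte[] kryoBytes) {
        return new String(Base64.getEncoder().encode(kryoBytes), StandardCharsets.UTF_8);
      }

      static byte[] deserialize(String s) {
        return Base64.getDecoder().decode(s.getBytes(StandardCharsets.UTF_8));
      }

      public static void main(String[] args) {
        byte[] payload = {1, 2, 3};
        System.out.println(Arrays.equals(payload, deserialize(serialize(payload)))); // true
      }
    }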

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java
index 277f866..8bee9d3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.nio.CharBuffer;
 import java.nio.charset.CharsetDecoder;
@@ -227,11 +226,7 @@ public abstract class AbstractFilterStringColLikeStringScalar extends VectorExpr
     final byte [] byteSub;
 
     NoneChecker(String pattern) {
-      try {
-        byteSub = pattern.getBytes("UTF-8");
-      } catch (UnsupportedEncodingException e) {
-        throw new RuntimeException(e);
-      }
+      byteSub = pattern.getBytes(StandardCharsets.UTF_8);
     }
 
     public boolean check(byte[] byteS, int start, int len) {
@@ -255,11 +250,7 @@ public abstract class AbstractFilterStringColLikeStringScalar extends VectorExpr
     final byte[] byteSub;
 
     BeginChecker(String pattern) {
-      try {
-        byteSub = pattern.getBytes("UTF-8");
-      } catch (UnsupportedEncodingException e) {
-        throw new RuntimeException(e);
-      }
+      byteSub = pattern.getBytes(StandardCharsets.UTF_8);
     }
 
     public boolean check(byte[] byteS, int start, int len) {
@@ -278,11 +269,7 @@ public abstract class AbstractFilterStringColLikeStringScalar extends VectorExpr
     final byte[] byteSub;
 
     EndChecker(String pattern) {
-      try {
-        byteSub = pattern.getBytes("UTF-8");
-      } catch (UnsupportedEncodingException e) {
-        throw new RuntimeException(e);
-      }
+      byteSub = pattern.getBytes(StandardCharsets.UTF_8);
     }
 
     public boolean check(byte[] byteS, int start, int len) {
@@ -417,11 +404,7 @@ public abstract class AbstractFilterStringColLikeStringScalar extends VectorExpr
     }
 
     private int utf8Length(String chunk) {
-      try {
-        return chunk.getBytes("UTF-8").length;
-      } catch (UnsupportedEncodingException ue) {
-        throw new RuntimeException(ue);
-      }
+      return chunk.getBytes(StandardCharsets.UTF_8).length;
     }
 
   }
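
The utf8Length helper is worth a second look: String.length() counts UTF-16
code units, while the LIKE checkers compare raw UTF-8 bytes, so the byte
count is the one that matters and can be larger. A sketch:

    import java.nio.charset.StandardCharsets;

    public class Utf8Length {
      static int utf8Length(String chunk) {
        return chunk.getBytes(StandardCharsets.UTF_8).length;
      }

      public static void main(String[] args) {
        // ASCII: one byte per char; CJK: three bytes per char here.
        System.out.println("abc".length() + " vs " + utf8Length("abc")); // 3 vs 3
        System.out.println("日本".length() + " vs " + utf8Length("日本")); // 2 vs 6
      }
    }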

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java
index 106ace0..a0eb43d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java
@@ -13,8 +13,8 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.serde.primitive;
 
-import java.io.UnsupportedEncodingException;
 import java.nio.charset.CharacterCodingException;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -107,11 +107,8 @@ public class ParquetStringInspector extends JavaStringObjectInspector implements
 
   @Override
   public Object set(final Object o, final String string) {
-    try {
-      return new BytesWritable(string == null ? null : string.getBytes("UTF-8"));
-    } catch (UnsupportedEncodingException e) {
-      throw new RuntimeException("Failed to encode string in UTF-8", e);
-    }
+    return new BytesWritable(
+        string == null ? null : string.getBytes(StandardCharsets.UTF_8));
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
index c1d7133..7372275 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
@@ -41,9 +41,9 @@ import org.apache.parquet.schema.OriginalType;
 import org.apache.parquet.schema.PrimitiveType;
 
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 
 /**
@@ -275,12 +275,7 @@ public final class ParquetDataColumnReaderFactory {
      * Method to convert string to UTF-8 bytes.
      */
     protected static byte[] convertToBytes(String value) {
-      try {
-        // convert integer to string
-        return value.getBytes("UTF-8");
-      } catch (UnsupportedEncodingException e) {
-        throw new RuntimeException("Failed to encode string in UTF-8", e);
-      }
+      return value.getBytes(StandardCharsets.UTF_8);
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
index 4c4f9c1..6488c09 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
@@ -24,6 +24,7 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -405,22 +406,17 @@ public class JsonMetaDataFormatter implements MetaDataFormatter {
         makeTablePartions(parts)).build());
   }
 
-  private List<Map<String, Object>> makeTablePartions(List<String> parts)
-      throws HiveException {
-    try {
-      ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>();
-      for (String part : parts) {
-        res.add(makeOneTablePartition(part));
-      }
-      return res;
-    } catch (UnsupportedEncodingException e) {
-      throw new HiveException(e);
+  private List<Map<String, Object>> makeTablePartions(List<String> parts) {
+    ArrayList<Map<String, Object>> res =
+        new ArrayList<Map<String, Object>>(parts.size());
+    for (String part : parts) {
+      res.add(makeOneTablePartition(part));
     }
+    return res;
   }
 
   // This seems like a very wrong implementation.
-  private Map<String, Object> makeOneTablePartition(String partIdent)
-      throws UnsupportedEncodingException {
+  private Map<String, Object> makeOneTablePartition(String partIdent) {
     ArrayList<Map<String, Object>> res = new ArrayList<Map<String, Object>>();
 
     ArrayList<String> names = new ArrayList<String>();
@@ -431,7 +427,10 @@ public class JsonMetaDataFormatter implements MetaDataFormatter {
       if (kv != null) {
         name = kv[0];
         if (kv.length > 1) {
-          val = URLDecoder.decode(kv[1], "UTF-8");
+          try {
+            val = URLDecoder.decode(kv[1], StandardCharsets.UTF_8.name());
+          } catch (UnsupportedEncodingException e) {
+          }
         }
       }
       if (val != null) {
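
One wrinkle in this hunk: URLDecoder.decode(String, String) still declares
UnsupportedEncodingException, so a try/catch has to stay, and the new catch
body is empty, silently leaving val null where the old code surfaced a
HiveException. The exception is unreachable in practice because UTF-8 is a
guaranteed charset; a common alternative, sketched here rather than taken
from the patch, is to rethrow as an unchecked error:

    import java.io.UnsupportedEncodingException;
    import java.net.URLDecoder;
    import java.nio.charset.StandardCharsets;

    public class DecodeOrFail {
      static String decode(String value) {
        try {
          return URLDecoder.decode(value, StandardCharsets.UTF_8.name());
        } catch (UnsupportedEncodingException e) {
          // UTF-8 is required on every JVM; reaching this is a platform bug.
          throw new AssertionError("UTF-8 is a guaranteed charset", e);
        }
      }

      public static void main(String[] args) {
        System.out.println(decode("a%3Db")); // a=b
      }
    }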

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
index a52e5e6..e564daf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hive.ql.optimizer.physical;
 
 import java.io.Serializable;
-import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -173,7 +172,7 @@ public class CommonJoinTaskDispatcher extends AbstractJoinTaskDispatcher impleme
 
   // create map join task and set big table as bigTablePosition
   private MapRedTask convertTaskToMapJoinTask(MapredWork newWork, int bigTablePosition)
-      throws UnsupportedEncodingException, SemanticException {
+      throws SemanticException {
     // create a mapred task for this work
     MapRedTask newTask = (MapRedTask) TaskFactory.get(newWork);
     JoinOperator newJoinOp = getJoinOp(newTask);

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
index d077b19..af3edf8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hive.ql.optimizer.physical;
 
 import java.io.Serializable;
-import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
@@ -164,7 +163,7 @@ public class SortMergeJoinTaskDispatcher extends AbstractJoinTaskDispatcher impl
   private MapRedTask convertSMBTaskToMapJoinTask(MapredWork origWork,
       int bigTablePosition,
       SMBMapJoinOperator smbJoinOp)
-      throws UnsupportedEncodingException, SemanticException {
+      throws SemanticException {
     // deep copy a new mapred work
     MapredWork newWork = SerializationUtilities.clonePlan(origWork);
     // create a mapred task for this work

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
index 14d1343..e127d42 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import java.io.UnsupportedEncodingException;
 import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Random;
@@ -331,15 +330,12 @@ public class TestVectorMathFunctions {
     inL.vector[1] = 255;
     inL.vector[2] = 0;
     inS.initBuffer();
-    try {
-      inS.setVal(0, "00".getBytes("UTF-8"), 0, 2);
-      inS.setVal(1, "3232".getBytes("UTF-8"), 0, 4);
-      byte[] bad = "bad data".getBytes("UTF-8");
-      inS.setVal(2, bad, 0, bad.length);
-    } catch (UnsupportedEncodingException e) {
-      e.printStackTrace();
-      Assert.assertTrue(false);
-    }
+
+    inS.setVal(0, "00".getBytes(StandardCharsets.UTF_8), 0, 2);
+    inS.setVal(1, "3232".getBytes(StandardCharsets.UTF_8), 0, 4);
+    byte[] bad = "bad data".getBytes(StandardCharsets.UTF_8);
+    inS.setVal(2, bad, 0, bad.length);
+
 
     batch.cols[0] = inS;
     batch.cols[1] = inL;
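
On the test side the cleanup deletes the catch-and-fail boilerplate
(e.printStackTrace(); Assert.assertTrue(false);) that the checked exception
used to force into every setup block. A minimal JUnit 4 sketch of the
exception-free shape (class and method names are illustrative):

    import static org.junit.Assert.assertArrayEquals;

    import java.nio.charset.StandardCharsets;

    import org.junit.Test;

    public class CharsetConversionTest {
      @Test
      public void bytesRoundTrip() {
        // No try/catch: getBytes(Charset) declares no checked exception.
        byte[] b = "3232".getBytes(StandardCharsets.UTF_8);
        assertArrayEquals(new byte[] {'3', '2', '3', '2'}, b);
      }
    }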

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java
index 902f29e..407ce04 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import static org.junit.Assert.assertEquals;
 
-import java.io.UnsupportedEncodingException;
 import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Random;
@@ -99,33 +98,33 @@ public class TestVectorStringExpressions {
   private static byte[] ascii_sentence;
 
   static {
-    try {
-      blue = "blue".getBytes("UTF-8");
-      red = "red".getBytes("UTF-8");
-      redred = "redred".getBytes("UTF-8");
-      green = "green".getBytes("UTF-8");
-      greenred = "greenred".getBytes("UTF-8");
-      redgreen = "redgreen".getBytes("UTF-8");
-      greengreen = "greengreen".getBytes("UTF-8");
-      emptyString = "".getBytes("UTF-8");
-      mixedUp = "mixedUp".getBytes("UTF-8");
-      mixedUpLower = "mixedup".getBytes("UTF-8");
-      mixedUpUpper = "MIXEDUP".getBytes("UTF-8");
-      mixPercentPattern = "mix%".getBytes("UTF-8"); // for use as wildcard pattern to test LIKE
-      multiByte = new byte[10];
-      addMultiByteChars(multiByte);
-      blanksLeft = "  foo".getBytes("UTF-8");
-      blanksRight = "foo  ".getBytes("UTF-8");
-      blanksBoth = "  foo  ".getBytes("UTF-8");
-      blankString = "  ".getBytes("UTF-8");
-      blankRanges = "   more  than a    bargain    ".getBytes("UTF-8");
-                   //012345678901234567890123456789
-      ascii_sentence = "The fox trotted over the fence.".getBytes("UTF-8");
-                      //0123456789012345678901234567890
-
-    } catch (UnsupportedEncodingException e) {
-      e.printStackTrace();
-    }
+    blue = "blue".getBytes(StandardCharsets.UTF_8);
+    red = "red".getBytes(StandardCharsets.UTF_8);
+    redred = "redred".getBytes(StandardCharsets.UTF_8);
+    green = "green".getBytes(StandardCharsets.UTF_8);
+    greenred = "greenred".getBytes(StandardCharsets.UTF_8);
+    redgreen = "redgreen".getBytes(StandardCharsets.UTF_8);
+    greengreen = "greengreen".getBytes(StandardCharsets.UTF_8);
+    emptyString = "".getBytes(StandardCharsets.UTF_8);
+    mixedUp = "mixedUp".getBytes(StandardCharsets.UTF_8);
+    mixedUpLower = "mixedup".getBytes(StandardCharsets.UTF_8);
+    mixedUpUpper = "MIXEDUP".getBytes(StandardCharsets.UTF_8);
+
+    // for use as wildcard pattern to test LIKE
+    mixPercentPattern = "mix%".getBytes(StandardCharsets.UTF_8); 
+
+    multiByte = new byte[10];
+    addMultiByteChars(multiByte);
+    blanksLeft = "  foo".getBytes(StandardCharsets.UTF_8);
+    blanksRight = "foo  ".getBytes(StandardCharsets.UTF_8);
+    blanksBoth = "  foo  ".getBytes(StandardCharsets.UTF_8);
+    blankString = "  ".getBytes(StandardCharsets.UTF_8);
+    blankRanges =
+        "   more  than a    bargain    ".getBytes(StandardCharsets.UTF_8);
+    // 012345678901234567890123456789
+    ascii_sentence =
+        "The fox trotted over the fence.".getBytes(StandardCharsets.UTF_8);
+    // 0123456789012345678901234567890
     red2 = new byte[red.length];
     System.arraycopy(red, 0, red2, 0, red.length);
   }
@@ -3184,12 +3183,8 @@ public class TestVectorStringExpressions {
     while (s.length() < 500) {
       s += s;
     }
-    byte[] b = null;
-    try {
-      b = s.getBytes("UTF-8");
-    } catch (UnsupportedEncodingException e) {
-      e.printStackTrace();
-    }
+    byte[] b = s.getBytes(StandardCharsets.UTF_8);
+
     for (int i = 0; i != VectorizedRowBatch.DEFAULT_SIZE; i++) {
       bcv.setVal(i, b, 0, b.length);
     }
@@ -3202,12 +3197,8 @@ public class TestVectorStringExpressions {
   public void testLoadBytesColumnVectorByRef() {
     BytesColumnVector bcv = new BytesColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
     String s = "red";
-    byte[] b = null;
-    try {
-      b = s.getBytes("UTF-8");
-    } catch (UnsupportedEncodingException e) {
-      e.printStackTrace();
-    }
+    byte[] b = s.getBytes(StandardCharsets.UTF_8);
+
     for (int i = 0; i != VectorizedRowBatch.DEFAULT_SIZE; i++) {
       bcv.setRef(i, b, 0, b.length);
     }
@@ -4291,7 +4282,7 @@ public class TestVectorStringExpressions {
   }
 
   @Test
-  public void testStringLikePatternType() throws UnsupportedEncodingException, HiveException {
+  public void testStringLikePatternType() throws HiveException {
     FilterStringColLikeStringScalar expr;
     VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(1, 1, 1);
     vrb.cols[0] = new BytesColumnVector(1);
@@ -4306,7 +4297,7 @@ public class TestVectorStringExpressions {
         expr.checker.getClass());
 
     // END pattern
-    expr = new FilterStringColLikeStringScalar(0, "%abc".getBytes("UTF-8"));
+    expr = new FilterStringColLikeStringScalar(0, "%abc".getBytes(StandardCharsets.UTF_8));
     expr.transientInit();
     expr.evaluate(vrb);
     Assert.assertEquals(FilterStringColLikeStringScalar.EndChecker.class,
@@ -4342,7 +4333,7 @@ public class TestVectorStringExpressions {
   }
 
   @Test
-  public void testStringLikeMultiByte() throws HiveException, UnsupportedEncodingException {
+  public void testStringLikeMultiByte() throws HiveException {
     FilterStringColLikeStringScalar expr;
     VectorizedRowBatch batch;
 
@@ -4427,7 +4418,7 @@ public class TestVectorStringExpressions {
   }
 
   @Test
-  public void testStringLikeRandomized() throws HiveException, UnsupportedEncodingException {
+  public void testStringLikeRandomized() throws HiveException {
     final String [] patterns = new String[] {
         "ABC%",
         "%ABC",
@@ -4445,7 +4436,7 @@ public class TestVectorStringExpressions {
     Random control = new Random(1234);
     UDFLike udf = new UDFLike();
     for (String pattern : patterns) {
-      VectorExpression expr = new FilterStringColLikeStringScalar(0, pattern.getBytes("utf-8"));
+      VectorExpression expr = new FilterStringColLikeStringScalar(0, pattern.getBytes(StandardCharsets.UTF_8));
       expr.transientInit();
       VectorizedRowBatch batch = VectorizedRowGroupGenUtil.getVectorizedRowBatch(1, 1, 1);
       batch.cols[0] = new BytesColumnVector(1);
@@ -4457,7 +4448,7 @@ public class TestVectorStringExpressions {
         BooleanWritable like = udf.evaluate(new Text(input), pText);
         batch.reset();
         bcv.initBuffer();
-        byte[] utf8 = input.getBytes("utf-8");
+        byte[] utf8 = input.getBytes(StandardCharsets.UTF_8);
         bcv.setVal(0, utf8, 0, utf8.length);
         bcv.noNulls = true;
         batch.size = 1;
@@ -4997,16 +4988,16 @@ public class TestVectorStringExpressions {
   }
 
   @Test
-  public void testSubstrStart() throws HiveException, UnsupportedEncodingException {
+  public void testSubstrStart() throws HiveException {
     // Testing no nulls and no repeating
     VectorizedRowBatch batch = new VectorizedRowBatch(2);
     BytesColumnVector v = new BytesColumnVector();
     batch.cols[0] = v;
     BytesColumnVector outV = new BytesColumnVector();
     batch.cols[1] = outV;
-    byte[] data1 = "abcd string".getBytes("UTF-8");
-    byte[] data2 = "efgh string".getBytes("UTF-8");
-    byte[] data3 = "efgh".getBytes("UTF-8");
+    byte[] data1 = "abcd string".getBytes(StandardCharsets.UTF_8);
+    byte[] data2 = "efgh string".getBytes(StandardCharsets.UTF_8);
+    byte[] data3 = "efgh".getBytes(StandardCharsets.UTF_8);
     batch.size = 3;
     v.noNulls = true;
     v.setRef(0, data1, 0, data1.length);
@@ -5022,7 +5013,7 @@ public class TestVectorStringExpressions {
     Assert.assertEquals(3, batch.size);
     Assert.assertTrue(outCol.noNulls);
     Assert.assertFalse(outCol.isRepeating);
-    byte[] expected = "string".getBytes("UTF-8");
+    byte[] expected = "string".getBytes(StandardCharsets.UTF_8);
     Assert.assertEquals(0,
     StringExpr.compare(
             expected, 0, expected.length, outCol.vector[0], outCol.start[0], outCol.length[0]
@@ -5139,7 +5130,7 @@ public class TestVectorStringExpressions {
     batch.cols[1] = outV;
     expr.evaluate(batch);
     outCol = (BytesColumnVector) batch.cols[1];
-    expected = "string".getBytes("UTF-8");
+    expected = "string".getBytes(StandardCharsets.UTF_8);
     Assert.assertTrue(outV.isRepeating);
     Assert.assertTrue(outV.noNulls);
     Assert.assertEquals(0,
@@ -5196,7 +5187,7 @@ public class TestVectorStringExpressions {
   }
 
   @Test
-  public void testSubstrStartLen() throws HiveException, UnsupportedEncodingException {
+  public void testSubstrStartLen() throws HiveException {
     // Testing no nulls and no repeating
 
     VectorizedRowBatch batch = new VectorizedRowBatch(2);
@@ -5204,9 +5195,9 @@ public class TestVectorStringExpressions {
     batch.cols[0] = v;
     BytesColumnVector outV = new BytesColumnVector();
     batch.cols[1] = outV;
-    byte[] data1 = "abcd string".getBytes("UTF-8");
-    byte[] data2 = "efgh string".getBytes("UTF-8");
-    byte[] data3 = "efgh".getBytes("UTF-8");
+    byte[] data1 = "abcd string".getBytes(StandardCharsets.UTF_8);
+    byte[] data2 = "efgh string".getBytes(StandardCharsets.UTF_8);
+    byte[] data3 = "efgh".getBytes(StandardCharsets.UTF_8);
     batch.size = 3;
     v.noNulls = true;
     v.setRef(0, data1, 0, data1.length);
@@ -5224,7 +5215,7 @@ public class TestVectorStringExpressions {
     BytesColumnVector outCol = (BytesColumnVector) batch.cols[1];
     Assert.assertEquals(3, batch.size);
     Assert.assertFalse(outCol.isRepeating);
-    byte[] expected = "string".getBytes("UTF-8");
+    byte[] expected = "string".getBytes(StandardCharsets.UTF_8);
     Assert.assertEquals(0,
     StringExpr.compare(
             expected, 0, expected.length, outCol.vector[0], outCol.start[0], outCol.length[0]
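
One detail this file cleans up in passing: the old tests mixed "UTF-8" and
"utf-8" literals. Charset lookup by name is case-insensitive, so both named
the same charset; the shared constant removes the per-call lookup along
with the inconsistency. A quick check:

    import java.nio.charset.Charset;
    import java.nio.charset.StandardCharsets;

    public class CaseInsensitiveLookup {
      public static void main(String[] args) {
        // Both spellings resolve to the charset with canonical name "UTF-8".
        System.out.println(Charset.forName("utf-8").equals(StandardCharsets.UTF_8)); // true
        System.out.println(Charset.forName("UTF-8").equals(StandardCharsets.UTF_8)); // true
      }
    }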

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
index c1e0d0e..04695d3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
@@ -30,7 +30,7 @@ import java.io.DataInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.RandomAccessFile;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
@@ -116,39 +116,35 @@ public class TestRCFile {
     // Create the SerDe
     tbl = createProperties();
     SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
-    try {
-      bytesArray = new byte[][] {"123".getBytes("UTF-8"),
-          "456".getBytes("UTF-8"), "789".getBytes("UTF-8"),
-          "1000".getBytes("UTF-8"), "5.3".getBytes("UTF-8"),
-          "hive and hadoop".getBytes("UTF-8"), new byte[0],
-          "NULL".getBytes("UTF-8")};
-      s = new BytesRefArrayWritable(bytesArray.length);
-      s.set(0, new BytesRefWritable("123".getBytes("UTF-8")));
-      s.set(1, new BytesRefWritable("456".getBytes("UTF-8")));
-      s.set(2, new BytesRefWritable("789".getBytes("UTF-8")));
-      s.set(3, new BytesRefWritable("1000".getBytes("UTF-8")));
-      s.set(4, new BytesRefWritable("5.3".getBytes("UTF-8")));
-      s.set(5, new BytesRefWritable("hive and hadoop".getBytes("UTF-8")));
-      s.set(6, new BytesRefWritable("NULL".getBytes("UTF-8")));
-      s.set(7, new BytesRefWritable("NULL".getBytes("UTF-8")));
-
-      // partial test init
-      patialS.set(0, new BytesRefWritable("NULL".getBytes("UTF-8")));
-      patialS.set(1, new BytesRefWritable("NULL".getBytes("UTF-8")));
-      patialS.set(2, new BytesRefWritable("789".getBytes("UTF-8")));
-      patialS.set(3, new BytesRefWritable("1000".getBytes("UTF-8")));
-      patialS.set(4, new BytesRefWritable("NULL".getBytes("UTF-8")));
-      // LazyString has no so-called NULL sequence. The value is empty string if not.
-      patialS.set(5, new BytesRefWritable("".getBytes("UTF-8")));
-      patialS.set(6, new BytesRefWritable("NULL".getBytes("UTF-8")));
-      // LazyString has no so-called NULL sequence. The value is empty string if not.
-      patialS.set(7, new BytesRefWritable("".getBytes("UTF-8")));
-
-      numRepeat = (int) Math.ceil((double)SequenceFile.SYNC_INTERVAL / (double)bytesArray.length);
-
-    } catch (UnsupportedEncodingException e) {
-      throw new RuntimeException(e);
-    }
+
+    bytesArray = new byte[][] {"123".getBytes(StandardCharsets.UTF_8),
+        "456".getBytes(StandardCharsets.UTF_8), "789".getBytes(StandardCharsets.UTF_8),
+        "1000".getBytes(StandardCharsets.UTF_8), "5.3".getBytes(StandardCharsets.UTF_8),
+        "hive and hadoop".getBytes(StandardCharsets.UTF_8), new byte[0],
+        "NULL".getBytes(StandardCharsets.UTF_8)};
+    s = new BytesRefArrayWritable(bytesArray.length);
+    s.set(0, new BytesRefWritable("123".getBytes(StandardCharsets.UTF_8)));
+    s.set(1, new BytesRefWritable("456".getBytes(StandardCharsets.UTF_8)));
+    s.set(2, new BytesRefWritable("789".getBytes(StandardCharsets.UTF_8)));
+    s.set(3, new BytesRefWritable("1000".getBytes(StandardCharsets.UTF_8)));
+    s.set(4, new BytesRefWritable("5.3".getBytes(StandardCharsets.UTF_8)));
+    s.set(5, new BytesRefWritable("hive and hadoop".getBytes(StandardCharsets.UTF_8)));
+    s.set(6, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8)));
+    s.set(7, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8)));
+
+    // partial test init
+    patialS.set(0, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8)));
+    patialS.set(1, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8)));
+    patialS.set(2, new BytesRefWritable("789".getBytes(StandardCharsets.UTF_8)));
+    patialS.set(3, new BytesRefWritable("1000".getBytes(StandardCharsets.UTF_8)));
+    patialS.set(4, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8)));
+    // LazyString has no so-called NULL sequence. The value is empty string if not.
+    patialS.set(5, new BytesRefWritable("".getBytes(StandardCharsets.UTF_8)));
+    patialS.set(6, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8)));
+    // LazyString has no so-called NULL sequence. The value is empty string if not.
+    patialS.set(7, new BytesRefWritable("".getBytes(StandardCharsets.UTF_8)));
+
+    numRepeat = (int) Math.ceil((double)SequenceFile.SYNC_INTERVAL / (double)bytesArray.length);
   }
 
   @After
@@ -169,14 +165,14 @@ public class TestRCFile {
   public void testSimpleReadAndWrite() throws IOException, SerDeException {
     cleanup();
 
-    byte[][] record_1 = {"123".getBytes("UTF-8"), "456".getBytes("UTF-8"),
-        "789".getBytes("UTF-8"), "1000".getBytes("UTF-8"),
-        "5.3".getBytes("UTF-8"), "hive and hadoop".getBytes("UTF-8"),
-        new byte[0], "NULL".getBytes("UTF-8")};
-    byte[][] record_2 = {"100".getBytes("UTF-8"), "200".getBytes("UTF-8"),
-        "123".getBytes("UTF-8"), "1000".getBytes("UTF-8"),
-        "5.3".getBytes("UTF-8"), "hive and hadoop".getBytes("UTF-8"),
-        new byte[0], "NULL".getBytes("UTF-8")};
+    byte[][] record_1 = {"123".getBytes(StandardCharsets.UTF_8), "456".getBytes(StandardCharsets.UTF_8),
+        "789".getBytes(StandardCharsets.UTF_8), "1000".getBytes(StandardCharsets.UTF_8),
+        "5.3".getBytes(StandardCharsets.UTF_8), "hive and hadoop".getBytes(StandardCharsets.UTF_8),
+        new byte[0], "NULL".getBytes(StandardCharsets.UTF_8)};
+    byte[][] record_2 = {"100".getBytes(StandardCharsets.UTF_8), "200".getBytes(StandardCharsets.UTF_8),
+        "123".getBytes(StandardCharsets.UTF_8), "1000".getBytes(StandardCharsets.UTF_8),
+        "5.3".getBytes(StandardCharsets.UTF_8), "hive and hadoop".getBytes(StandardCharsets.UTF_8),
+        new byte[0], "NULL".getBytes(StandardCharsets.UTF_8)};
     RCFileOutputFormat.setColumnNumber(conf, expectedFieldsData.length);
     RCFile.Writer writer =
       new RCFile.Writer(fs, conf, file, null,
@@ -265,23 +261,23 @@ public class TestRCFile {
                         new DefaultCodec());
 
     byte[][] record_1 = {
-        "123".getBytes("UTF-8"),
-        "456".getBytes("UTF-8"),
-        "789".getBytes("UTF-8"),
-        "1000".getBytes("UTF-8"),
-        "5.3".getBytes("UTF-8"),
-        "hive and hadoop".getBytes("UTF-8"),
+        "123".getBytes(StandardCharsets.UTF_8),
+        "456".getBytes(StandardCharsets.UTF_8),
+        "789".getBytes(StandardCharsets.UTF_8),
+        "1000".getBytes(StandardCharsets.UTF_8),
+        "5.3".getBytes(StandardCharsets.UTF_8),
+        "hive and hadoop".getBytes(StandardCharsets.UTF_8),
         new byte[0],
-        "NULL".getBytes("UTF-8") };
+        "NULL".getBytes(StandardCharsets.UTF_8) };
     byte[][] record_2 = {
-        "100".getBytes("UTF-8"),
-        "200".getBytes("UTF-8"),
-        "123".getBytes("UTF-8"),
-        "1000".getBytes("UTF-8"),
-        "5.3".getBytes("UTF-8"),
-        "hive and hadoop".getBytes("UTF-8"),
+        "100".getBytes(StandardCharsets.UTF_8),
+        "200".getBytes(StandardCharsets.UTF_8),
+        "123".getBytes(StandardCharsets.UTF_8),
+        "1000".getBytes(StandardCharsets.UTF_8),
+        "5.3".getBytes(StandardCharsets.UTF_8),
+        "hive and hadoop".getBytes(StandardCharsets.UTF_8),
         new byte[0],
-        "NULL".getBytes("UTF-8")};
+        "NULL".getBytes(StandardCharsets.UTF_8)};
 
     BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
     for (int i = 0; i < record_1.length; i++) {
@@ -350,7 +346,8 @@ public class TestRCFile {
     Random rand = new Random();
     for (int recIdx = 0; recIdx < recCount; recIdx++) {
       for (int i = 0; i < record.length; i++) {
-        record[i] = new Integer(rand.nextInt()).toString().getBytes("UTF-8");
+        record[i] = new Integer(rand.nextInt()).toString()
+            .getBytes(StandardCharsets.UTF_8);
       }
       for (int i = 0; i < record.length; i++) {
         BytesRefWritable cu = new BytesRefWritable(record[i], 0,
@@ -465,11 +462,14 @@ public class TestRCFile {
       // test.performanceTest();
 
       test.testSimpleReadAndWrite();
-      byte[][] bytesArray = new byte[][] {"123".getBytes("UTF-8"),
-          "456".getBytes("UTF-8"), "789".getBytes("UTF-8"),
-          "1000".getBytes("UTF-8"), "5.3".getBytes("UTF-8"),
-          "hive and hadoop".getBytes("UTF-8"), new byte[0],
-          "NULL".getBytes("UTF-8")};
+      byte[][] bytesArray =
+          new byte[][] { "123".getBytes(StandardCharsets.UTF_8),
+              "456".getBytes(StandardCharsets.UTF_8),
+              "789".getBytes(StandardCharsets.UTF_8),
+              "1000".getBytes(StandardCharsets.UTF_8),
+              "5.3".getBytes(StandardCharsets.UTF_8),
+              "hive and hadoop".getBytes(StandardCharsets.UTF_8), new byte[0],
+              "NULL".getBytes(StandardCharsets.UTF_8) };
       test.writeTest(fs, count, file, bytesArray);
       test.fullyReadTest(fs, count, file);
       test.partialReadTest(fs, count, file);
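
A small side observation on the record-fill hunk above: the kept line still
boxes through the deprecated new Integer(...) constructor before calling
toString(). A sketch of the same cell fill without the deprecated
constructor:

    import java.nio.charset.StandardCharsets;
    import java.util.Random;

    public class RandomCell {
      public static void main(String[] args) {
        Random rand = new Random();
        // Integer.toString avoids constructing a boxed Integer entirely.
        byte[] cell = Integer.toString(rand.nextInt()).getBytes(StandardCharsets.UTF_8);
        System.out.println(cell.length + " bytes");
      }
    }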

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
index 94a38e0..b242392 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
@@ -44,7 +44,7 @@ import org.apache.parquet.io.api.RecordConsumer;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.schema.MessageTypeParser;
 
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -136,8 +136,8 @@ public class TestDataWritableWriter {
     return new BooleanWritable(value);
   }
 
-  private BytesWritable createString(String value) throws UnsupportedEncodingException {
-    return new BytesWritable(value.getBytes("UTF-8"));
+  private BytesWritable createString(String value) {
+    return new BytesWritable(value.getBytes(StandardCharsets.UTF_8));
   }
 
   private ArrayWritable createGroup(Writable...values) {

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java
index 777382c..4c40908 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java
@@ -24,7 +24,7 @@ import static junit.framework.Assert.assertTrue;
 
 import java.beans.XMLDecoder;
 import java.io.ByteArrayInputStream;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Set;
 
@@ -63,12 +63,7 @@ public class TestConvertAstToSearchArg {
   }
 
   private ExprNodeGenericFuncDesc getFuncDesc(String xmlSerialized) {
-    byte[] bytes;
-    try {
-      bytes = xmlSerialized.getBytes("UTF-8");
-    } catch (UnsupportedEncodingException ex) {
-      throw new RuntimeException("UTF-8 support required", ex);
-    }
+    byte[] bytes = xmlSerialized.getBytes(StandardCharsets.UTF_8);
 
     ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
     XMLDecoder decoder = new XMLDecoder(bais, null, null);

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
index 3473c56..1630756 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
@@ -19,11 +19,9 @@ package org.apache.hadoop.hive.serde2.lazy;
 
 import java.io.IOException;
 import java.io.OutputStream;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector;
 
@@ -36,7 +34,6 @@ import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestam
  *
  */
 public class LazyTimestamp extends LazyPrimitive<LazyTimestampObjectInspector, TimestampWritableV2> {
-  private static final Logger LOG = LoggerFactory.getLogger(LazyTimestamp.class);
 
   public LazyTimestamp(LazyTimestampObjectInspector oi) {
     super(oi);
@@ -58,17 +55,12 @@ public class LazyTimestamp extends LazyPrimitive<LazyTimestampObjectInspector, T
    */
   @Override
   public void init(ByteArrayRef bytes, int start, int length) {
-    String s = null;
     if (!LazyUtils.isDateMaybe(bytes.getData(), start, length)) {
       isNull = true;
       return;
     }
-    try {
-      s = new String(bytes.getData(), start, length, "US-ASCII");
-    } catch (UnsupportedEncodingException e) {
-      LOG.error("Unsupported encoding found ", e);
-      s = "";
-    }
+    String s =
+        new String(bytes.getData(), start, length, StandardCharsets.US_ASCII);
 
     Timestamp t = null;
     if (s.compareTo("NULL") == 0) {
@@ -96,12 +88,11 @@ public class LazyTimestamp extends LazyPrimitive<LazyTimestampObjectInspector, T
    */
   public static void writeUTF8(OutputStream out, TimestampWritableV2 i)
       throws IOException {
-    if (i == null) {
-      // Serialize as time 0
-      out.write(TimestampWritableV2.nullBytes);
-    } else {
-      out.write(i.toString().getBytes("US-ASCII"));
+    byte[] b = TimestampWritableV2.nullBytes;
+    if (i != null) {
+      b = i.toString().getBytes(StandardCharsets.US_ASCII);
     }
+    out.write(b);
   }
 
   @Override
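
Besides the charset constant, writeUTF8 was restructured from an if/else
into a default-then-override shape with a single write call. A runnable
sketch of that shape, with a hypothetical NULL_BYTES constant standing in
for TimestampWritableV2.nullBytes:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;

    public class WriteAscii {
      // Hypothetical stand-in for TimestampWritableV2.nullBytes.
      private static final byte[] NULL_BYTES = {'\\', 'N'};

      static void writeUTF8(OutputStream out, Object value) throws IOException {
        byte[] b = NULL_BYTES;            // default: the null sentinel
        if (value != null) {
          b = value.toString().getBytes(StandardCharsets.US_ASCII);
        }
        out.write(b);                     // exactly one write path
      }

      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        writeUTF8(bos, null);
        writeUTF8(bos, 42);
        System.out.println(bos.toString(StandardCharsets.US_ASCII.name())); // \N42
      }
    }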

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestampLocalTZ.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestampLocalTZ.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestampLocalTZ.java
index a969840..ad43c41 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestampLocalTZ.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestampLocalTZ.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.hive.serde2.lazy;
 
 import java.io.IOException;
 import java.io.OutputStream;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.time.ZoneId;
 import java.time.format.DateTimeParseException;
 
@@ -29,8 +29,6 @@ import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampLocalTZObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * LazyPrimitive for TimestampLocalTZ. Similar to LazyTimestamp.
@@ -38,8 +36,6 @@ import org.slf4j.LoggerFactory;
 public class LazyTimestampLocalTZ extends
     LazyPrimitive<LazyTimestampLocalTZObjectInspector, TimestampLocalTZWritable> {
 
-  private static final Logger LOG = LoggerFactory.getLogger(LazyTimestampLocalTZ.class);
-
   private ZoneId timeZone;
 
   public LazyTimestampLocalTZ(LazyTimestampLocalTZObjectInspector lazyTimestampTZObjectInspector) {
@@ -68,7 +64,7 @@ public class LazyTimestampLocalTZ extends
 
     TimestampTZ t = null;
     try {
-      s = new String(bytes.getData(), start, length, "US-ASCII");
+      s = new String(bytes.getData(), start, length, StandardCharsets.US_ASCII);
       if (s.equals("NULL")) {
         isNull = true;
         logExceptionMessage(bytes, start, length,
@@ -77,9 +73,6 @@ public class LazyTimestampLocalTZ extends
         t = TimestampTZUtil.parse(s, timeZone);
         isNull = false;
       }
-    } catch (UnsupportedEncodingException e) {
-      isNull = true;
-      LOG.error("Unsupported encoding found ", e);
     } catch (DateTimeParseException e) {
       isNull = true;
       logExceptionMessage(bytes, start, length, serdeConstants.TIMESTAMPLOCALTZ_TYPE_NAME.toUpperCase());
@@ -93,10 +86,10 @@ public class LazyTimestampLocalTZ extends
   }
 
   public static void writeUTF8(OutputStream out, TimestampLocalTZWritable i) throws IOException {
-    if (i == null) {
-      out.write(TimestampLocalTZWritable.nullBytes);
-    } else {
-      out.write(i.toString().getBytes("US-ASCII"));
+    byte[] b = TimestampLocalTZWritable.nullBytes;
+    if (i != null) {
+      b = i.toString().getBytes(StandardCharsets.US_ASCII);
     }
+    out.write(b);
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java
index b59c919..6b19dbb 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.serde2.thrift;
 
-import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Properties;
 
@@ -336,12 +336,7 @@ public class TBinarySortableProtocol extends TProtocol implements
 
   @Override
   public void writeString(String str) throws TException {
-    byte[] dat;
-    try {
-      dat = str.getBytes("UTF-8");
-    } catch (UnsupportedEncodingException uex) {
-      throw new TException("JVM DOES NOT SUPPORT UTF-8: ",uex);
-    }
+    byte[] dat = str.getBytes(StandardCharsets.UTF_8);
     writeTextBytes(dat, 0, dat.length);
   }
 
@@ -631,12 +626,7 @@ public class TBinarySortableProtocol extends TProtocol implements
       stringBytes[i] = bin[0];
       i++;
     }
-    try {
-      String r = new String(stringBytes, 0, i, "UTF-8");
-      return r;
-    } catch (UnsupportedEncodingException uex) {
-      throw new TException("JVM DOES NOT SUPPORT UTF-8: ",uex);
-    }
+    return new String(stringBytes, 0, i, StandardCharsets.UTF_8);
   }
 
   @Override
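
The constructor used above, new String(byte[], int, int, Charset), is the
offset/length counterpart of the getBytes change, so the old "JVM DOES NOT
SUPPORT UTF-8" TException path disappears by construction. A sketch:

    import java.nio.charset.StandardCharsets;

    public class ReadBack {
      public static void main(String[] args) {
        byte[] stringBytes = "hello world".getBytes(StandardCharsets.UTF_8);
        // Decode only the first five bytes; no checked exception involved.
        String r = new String(stringBytes, 0, 5, StandardCharsets.UTF_8);
        System.out.println(r); // hello
      }
    }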

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
index ae90278..eafe821 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
@@ -25,11 +25,11 @@ import java.io.FileOutputStream;
 import java.io.FileReader;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.lang3.CharEncoding;
 import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
@@ -70,10 +70,12 @@ public class HiveCommandOperation extends ExecuteStatementOperation {
           + " and error output to file " + sessionState.getTmpErrOutputFile().toString());
       sessionState.in = null; // hive server's session input stream is not used
       // open a per-session file in auto-flush mode for writing temp results and tmp error output
-      sessionState.out =
-          new SessionStream(new FileOutputStream(sessionState.getTmpOutputFile()), true, CharEncoding.UTF_8);
-      sessionState.err =
-          new SessionStream(new FileOutputStream(sessionState.getTmpErrOutputFile()), true,CharEncoding.UTF_8);
+      sessionState.out = new SessionStream(
+          new FileOutputStream(sessionState.getTmpOutputFile()), true,
+          StandardCharsets.UTF_8.name());
+      sessionState.err = new SessionStream(
+          new FileOutputStream(sessionState.getTmpErrOutputFile()), true,
+          StandardCharsets.UTF_8.name());
     } catch (IOException e) {
       LOG.error("Error in creating temp output file ", e);
 
@@ -82,8 +84,10 @@ public class HiveCommandOperation extends ExecuteStatementOperation {
 
       try {
         sessionState.in = null;
-        sessionState.out = new SessionStream(System.out, true, CharEncoding.UTF_8);
-        sessionState.err = new SessionStream(System.err, true, CharEncoding.UTF_8);
+        sessionState.out =
+            new SessionStream(System.out, true, StandardCharsets.UTF_8.name());
+        sessionState.err =
+            new SessionStream(System.err, true, StandardCharsets.UTF_8.name());
       } catch (UnsupportedEncodingException ee) {
         LOG.error("Error creating PrintStream", e);
         ee.printStackTrace();
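
This file makes a slightly different swap: the old constant was
commons-lang3's CharEncoding.UTF_8 (deprecated in favor of
java.nio.charset.StandardCharsets) rather than a bare string literal.
StandardCharsets.UTF_8.name() yields the identical "UTF-8" string straight
from the JDK, dropping the third-party import. A quick check:

    import java.nio.charset.StandardCharsets;

    public class ConstantSwap {
      public static void main(String[] args) {
        // The canonical name matches the old CharEncoding.UTF_8 value.
        System.out.println(StandardCharsets.UTF_8.name()); // UTF-8
      }
    }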

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 429dbcd..747db58 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -21,6 +21,7 @@ package org.apache.hive.service.cli.operation;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.sql.SQLException;
 import java.util.ArrayList;
@@ -37,7 +38,6 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.lang3.CharEncoding;
 import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.common.metrics.common.Metrics;
@@ -138,9 +138,12 @@ public class SQLOperation extends ExecuteStatementOperation {
   private void setupSessionIO(SessionState sessionState) {
     try {
       sessionState.in = null; // hive server's session input stream is not used
-      sessionState.out = new SessionStream(System.out, true, CharEncoding.UTF_8);
-      sessionState.info = new SessionStream(System.err, true, CharEncoding.UTF_8);
-      sessionState.err = new SessionStream(System.err, true, CharEncoding.UTF_8);
+      sessionState.out =
+          new SessionStream(System.out, true, StandardCharsets.UTF_8.name());
+      sessionState.info =
+          new SessionStream(System.err, true, StandardCharsets.UTF_8.name());
+      sessionState.err =
+          new SessionStream(System.err, true, StandardCharsets.UTF_8.name());
     } catch (UnsupportedEncodingException e) {
         LOG.error("Error creating PrintStream", e);
         e.printStackTrace();
@@ -542,16 +545,12 @@ public class SQLOperation extends ExecuteStatementOperation {
     List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
 
     Object[] deserializedFields = new Object[fieldRefs.size()];
-    Object rowObj;
     ObjectInspector fieldOI;
 
     int protocol = getProtocolVersion().getValue();
     for (Object rowString : rows) {
-      try {
-        rowObj = serde.deserialize(new BytesWritable(((String)rowString).getBytes("UTF-8")));
-      } catch (UnsupportedEncodingException e) {
-        throw new SerDeException(e);
-      }
+      final Object rowObj = serde.deserialize(new BytesWritable(
+          ((String) rowString).getBytes(StandardCharsets.UTF_8)));
       for (int i = 0; i < fieldRefs.size(); i++) {
         StructField fieldRef = fieldRefs.get(i);
         fieldOI = fieldRef.getFieldObjectInspector();

http://git-wip-us.apache.org/repos/asf/hive/blob/a7e704c6/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
index ffc5ef4..ff8f268 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
@@ -19,7 +19,6 @@
 package org.apache.hive.service.cli.thrift;
 
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.net.InetAddress;
 import java.security.PrivilegedExceptionAction;
 import java.security.SecureRandom;
@@ -297,9 +296,8 @@ public class ThriftHttpServlet extends TServlet {
    * returns the client name associated with the session. Else, it returns null.
    * @param request The HTTP Servlet Request send by the client
    * @return Client Username if the request has valid HS2 cookie, else returns null
-   * @throws UnsupportedEncodingException
    */
-  private String validateCookie(HttpServletRequest request) throws UnsupportedEncodingException {
+  private String validateCookie(HttpServletRequest request) {
     // Find all the valid cookies associated with the request.
     Cookie[] cookies = request.getCookies();
 
@@ -319,9 +317,8 @@ public class ThriftHttpServlet extends TServlet {
    * Generate a server side cookie given the cookie value as the input.
    * @param str Input string token.
    * @return The generated cookie.
-   * @throws UnsupportedEncodingException
    */
-  private Cookie createCookie(String str) throws UnsupportedEncodingException {
+  private Cookie createCookie(String str) {
     if (LOG.isDebugEnabled()) {
       LOG.debug("Cookie name = " + AUTH_COOKIE + " value = " + str);
     }