Posted to common-commits@hadoop.apache.org by oz...@apache.org on 2015/03/09 11:55:23 UTC

[1/3] hadoop git commit: HADOOP-11602. Backport 'Fix toUpperCase/toLowerCase to use Locale.ENGLISH.' (ozawa)

Repository: hadoop
Updated Branches:
  refs/heads/branch-2 d9416317a -> b46f9e72d
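
The root cause behind this backport: the no-argument String.toLowerCase()/
toUpperCase() use the JVM's default locale, so case conversion silently
changes behaviour with the host's settings. The classic failure is the
Turkish locale, where 'I' lowercases to a dotless 'ı' and 'i' uppercases to
a dotted 'İ'. A minimal standalone sketch of the failure mode (illustrative
demo, not part of the patch):

    import java.util.Locale;

    public class TurkishLocaleDemo {
      public static void main(String[] args) {
        Locale.setDefault(new Locale("tr", "TR"));
        System.out.println("TITLE".toLowerCase());  // "tıtle" (dotless i)
        System.out.println("title".toUpperCase());  // "TİTLE" (dotted I)
        // Pinning the locale keeps the result stable on every host:
        System.out.println("TITLE".toLowerCase(Locale.ENGLISH));  // "title"
      }
    }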


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
index 35b63eb..9a2bb24 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
 import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
 import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
@@ -755,7 +756,7 @@ public class ClientRMService extends AbstractService implements
       if (applicationTypes != null && !applicationTypes.isEmpty()) {
         String appTypeToMatch = caseSensitive
             ? application.getApplicationType()
-            : application.getApplicationType().toLowerCase();
+            : StringUtils.toLowerCase(application.getApplicationType());
         if (!applicationTypes.contains(appTypeToMatch)) {
           continue;
         }
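
The change is mechanical throughout the patch: every bare toLowerCase()/
toUpperCase() call is routed through org.apache.hadoop.util.StringUtils.
The helpers are essentially locale-pinned wrappers along these lines (a
sketch; the authoritative code is in hadoop-common's StringUtils):

    // Sketch of the helpers this patch switches to, assuming they simply
    // pin Locale.ENGLISH (consult org.apache.hadoop.util.StringUtils).
    public static String toLowerCase(String str) {
      return str.toLowerCase(java.util.Locale.ENGLISH);
    }

    public static String toUpperCase(String str) {
      return str.toUpperCase(java.util.Locale.ENGLISH);
    }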

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java
index 230f9a9..d6e9e45 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server.resourcemanager.resource;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.util.StringUtils;
 
 @Private
 @Evolving
@@ -61,7 +62,7 @@ public class ResourceWeights {
         sb.append(", ");
       }
       ResourceType resourceType = ResourceType.values()[i];
-      sb.append(resourceType.name().toLowerCase());
+      sb.append(StringUtils.toLowerCase(resourceType.name()));
       sb.append(String.format(" weight=%.1f", getWeight(resourceType)));
     }
     sb.append(">");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
index 3528c2d..102e553 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
@@ -394,7 +394,7 @@ public class CapacitySchedulerConfiguration extends ReservationSchedulerConfigur
   public QueueState getState(String queue) {
     String state = get(getQueuePrefix(queue) + STATE);
     return (state != null) ? 
-        QueueState.valueOf(state.toUpperCase()) : QueueState.RUNNING;
+        QueueState.valueOf(StringUtils.toUpperCase(state)) : QueueState.RUNNING;
   }
   
   public void setAccessibleNodeLabels(String queue, Set<String> labels) {
@@ -490,7 +490,7 @@ public class CapacitySchedulerConfiguration extends ReservationSchedulerConfigur
   }
   
   private static String getAclKey(QueueACL acl) {
-    return "acl_" + acl.toString().toLowerCase();
+    return "acl_" + StringUtils.toLowerCase(acl.toString());
   }
 
   public AccessControlList getAcl(String queue, QueueACL acl) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java
index 32ef906..e477e6e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java
@@ -28,6 +28,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.utils.BuilderUtils;
@@ -241,7 +242,7 @@ public class FairSchedulerConfiguration extends Configuration {
   public static Resource parseResourceConfigValue(String val)
       throws AllocationConfigurationException {
     try {
-      val = val.toLowerCase();
+      val = StringUtils.toLowerCase(val);
       int memory = findResource(val, "mb");
       int vcores = findResource(val, "vcores");
       return BuilderUtils.newResource(memory, vcores);
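
Lower-casing the whole value up front is what lets findResource() match the
"mb" and "vcores" suffixes case-insensitively; with the pinned locale that
match no longer depends on the JVM default. Illustrative calls (values made
up for the example):

    Resource r = FairSchedulerConfiguration.parseResourceConfigValue("1024 mb, 3 vcores");
    Resource s = FairSchedulerConfiguration.parseResourceConfigValue("1024 MB, 3 VCORES");
    // r and s now describe the same <memory:1024, vCores:3> on any locale.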

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
index cc28afc..bf2a25b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.policies.DominantResourceFairnessPolicy;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.policies.FairSharePolicy;
@@ -72,7 +73,7 @@ public abstract class SchedulingPolicy {
       throws AllocationConfigurationException {
     @SuppressWarnings("rawtypes")
     Class clazz;
-    String text = policy.toLowerCase();
+    String text = StringUtils.toLowerCase(policy);
     if (text.equalsIgnoreCase(FairSharePolicy.NAME)) {
       clazz = FairSharePolicy.class;
     } else if (text.equalsIgnoreCase(FifoPolicy.NAME)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
index f28a9a8..13e0835 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
@@ -77,7 +77,7 @@ class NodesPage extends RmView {
               .th(".nodeManagerVersion", "Version")._()._().tbody();
       NodeState stateFilter = null;
       if (type != null && !type.isEmpty()) {
-        stateFilter = NodeState.valueOf(type.toUpperCase());
+        stateFilter = NodeState.valueOf(StringUtils.toUpperCase(type));
       }
       Collection<RMNode> rmNodes = this.rm.getRMContext().getRMNodes().values();
       boolean isInactive = false;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
index f8836d5..059ea09 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
@@ -66,6 +66,7 @@ import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationHandler;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
@@ -257,7 +258,8 @@ public class RMWebServices {
     } else {
       acceptedStates = EnumSet.noneOf(NodeState.class);
       for (String stateStr : states.split(",")) {
-        acceptedStates.add(NodeState.valueOf(stateStr.toUpperCase()));
+        acceptedStates.add(
+            NodeState.valueOf(StringUtils.toUpperCase(stateStr)));
       }
     }
     
@@ -506,7 +508,7 @@ public class RMWebServices {
     // if no states, returns the counts of all RMAppStates
     if (states.size() == 0) {
       for (YarnApplicationState state : YarnApplicationState.values()) {
-        states.add(state.toString().toLowerCase());
+        states.add(StringUtils.toLowerCase(state.toString()));
       }
     }
     // in case we extend to multiple applicationTypes in the future
@@ -518,8 +520,9 @@ public class RMWebServices {
     ConcurrentMap<ApplicationId, RMApp> apps = rm.getRMContext().getRMApps();
     for (RMApp rmapp : apps.values()) {
       YarnApplicationState state = rmapp.createApplicationState();
-      String type = rmapp.getApplicationType().trim().toLowerCase();
-      if (states.contains(state.toString().toLowerCase())) {
+      String type = StringUtils.toLowerCase(rmapp.getApplicationType().trim());
+      if (states.contains(
+          StringUtils.toLowerCase(state.toString()))) {
         if (types.contains(ANY)) {
           countApp(scoreboard, state, ANY);
         } else if (types.contains(type)) {
@@ -554,7 +557,8 @@ public class RMWebServices {
               if (isState) {
                 try {
                   // enum string is in the uppercase
-                  YarnApplicationState.valueOf(paramStr.trim().toUpperCase());
+                  YarnApplicationState.valueOf(
+                      StringUtils.toUpperCase(paramStr.trim()));
                 } catch (RuntimeException e) {
                   YarnApplicationState[] stateArray =
                       YarnApplicationState.values();
@@ -564,7 +568,8 @@ public class RMWebServices {
                       + " specified. It should be one of " + allAppStates);
                 }
               }
-              params.add(paramStr.trim().toLowerCase());
+              params.add(
+                  StringUtils.toLowerCase(paramStr.trim()));
             }
           }
         }
@@ -582,7 +587,8 @@ public class RMWebServices {
     for (String state : states) {
       Map<String, Long> partScoreboard = new HashMap<String, Long>();
       scoreboard.put(
-          YarnApplicationState.valueOf(state.toUpperCase()), partScoreboard);
+          YarnApplicationState.valueOf(StringUtils.toUpperCase(state)),
+          partScoreboard);
      // types is verified not to be empty
       for (String type : types) {
         partScoreboard.put(type, 0L);


[2/3] hadoop git commit: HADOOP-11602. Backport 'Fix toUpperCase/toLowerCase to use Locale.ENGLISH.' (ozawa)

Posted by oz...@apache.org.
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
index ddd8a1a..cf59c9d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.hdfs.server.namenode;
 
 import com.google.common.base.Objects;
 import org.apache.hadoop.fs.StorageType;
-import java.util.Locale;
+import org.apache.hadoop.util.StringUtils;
 
  public class QuotaByStorageTypeEntry {
    private StorageType type;
@@ -54,7 +54,7 @@ import java.util.Locale;
    public String toString() {
      StringBuilder sb = new StringBuilder();
      assert (type != null);
-     sb.append(type.toString().toLowerCase());
+     sb.append(StringUtils.toLowerCase(type.toString()));
      sb.append(':');
      sb.append(quota);
      return sb.toString();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
index 83e6426..ec7e0c9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
@@ -587,7 +587,7 @@ public class SecondaryNameNode implements Runnable,
       return 0;
     }
     
-    String cmd = opts.getCommand().toString().toLowerCase();
+    String cmd = StringUtils.toLowerCase(opts.getCommand().toString());
     
     int exitCode = 0;
     try {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
index 92a16cd..e6cf16c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -79,19 +80,19 @@ public class GetConf extends Configured implements Tool {
     private static final Map<String, CommandHandler> map;
     static  {
       map = new HashMap<String, CommandHandler>();
-      map.put(NAMENODE.getName().toLowerCase(), 
+      map.put(StringUtils.toLowerCase(NAMENODE.getName()),
           new NameNodesCommandHandler());
-      map.put(SECONDARY.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(SECONDARY.getName()),
           new SecondaryNameNodesCommandHandler());
-      map.put(BACKUP.getName().toLowerCase(), 
+      map.put(StringUtils.toLowerCase(BACKUP.getName()),
           new BackupNodesCommandHandler());
-      map.put(INCLUDE_FILE.getName().toLowerCase(), 
+      map.put(StringUtils.toLowerCase(INCLUDE_FILE.getName()),
           new CommandHandler(DFSConfigKeys.DFS_HOSTS));
-      map.put(EXCLUDE_FILE.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(EXCLUDE_FILE.getName()),
           new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
-      map.put(NNRPCADDRESSES.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(NNRPCADDRESSES.getName()),
           new NNRpcAddressesCommandHandler());
-      map.put(CONFKEY.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(CONFKEY.getName()),
           new PrintConfKeyCommandHandler());
     }
     
@@ -116,7 +117,7 @@ public class GetConf extends Configured implements Tool {
     }
     
     public static CommandHandler getHandler(String cmd) {
-      return map.get(cmd.toLowerCase());
+      return map.get(StringUtils.toLowerCase(cmd));
     }
   }
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
index c4b8424..de3aceb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
@@ -24,6 +24,7 @@ import java.io.OutputStream;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * EditsVisitorFactory for different implementations of EditsVisitor
@@ -43,7 +44,7 @@ public class OfflineEditsVisitorFactory {
    */
   static public OfflineEditsVisitor getEditsVisitor(String filename,
     String processor, boolean printToScreen) throws IOException {
-    if(processor.toLowerCase().equals("binary")) {
+    if(StringUtils.equalsIgnoreCase("binary", processor)) {
       return new BinaryEditsVisitor(filename);
     }
     OfflineEditsVisitor vis;
@@ -59,9 +60,9 @@ public class OfflineEditsVisitorFactory {
         outs[1] = System.out;
         out = new TeeOutputStream(outs);
       }
-      if(processor.toLowerCase().equals("xml")) {
+      if(StringUtils.equalsIgnoreCase("xml", processor)) {
         vis = new XmlEditsVisitor(out);
-      } else if(processor.toLowerCase().equals("stats")) {
+      } else if(StringUtils.equalsIgnoreCase("stats", processor)) {
         vis = new StatisticsEditsVisitor(out);
       } else {
        throw new IOException("Unknown processor " + processor +
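
Note the slightly different fix in this file: rather than lower-casing the
input and then comparing, the comparison itself is made case-insensitive
(the StringUtils helper here is presumably a null-safe wrapper around
String.equalsIgnoreCase()). equalsIgnoreCase() compares character by
character without consulting any locale, so it sidesteps the problem and
skips the temporary lowered string. Illustrative behaviour under a Turkish
default locale:

    Locale.setDefault(new Locale("tr", "TR"));
    System.out.println("BINARY".toLowerCase().equals("binary")); // false: "bınary"
    System.out.println("binary".equalsIgnoreCase("BINARY"));     // true, locale-free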

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
index 43fcd69..429b6fc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
@@ -33,6 +33,7 @@ import io.netty.handler.codec.http.QueryStringDecoder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.util.StringUtils;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -51,6 +52,7 @@ import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
 import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.APPLICATION_JSON_UTF8;
 import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.WEBHDFS_PREFIX;
 import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.WEBHDFS_PREFIX_LENGTH;
+
 /**
  * Implement the read-only WebHDFS API for fsimage.
  */
@@ -141,7 +143,7 @@ class FSImageHandler extends SimpleChannelInboundHandler<HttpRequest> {
   private static String getOp(QueryStringDecoder decoder) {
     Map<String, List<String>> parameters = decoder.parameters();
     return parameters.containsKey("op")
-            ? parameters.get("op").get(0).toUpperCase() : null;
+        ? StringUtils.toUpperCase(parameters.get("op").get(0)) : null;
   }
 
   private static String getPath(QueryStringDecoder decoder)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
index b6ff4b6..5ad1f24 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
 import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Subclass of {@link AuthenticationFilter} that
@@ -96,7 +97,7 @@ public class AuthFilter extends AuthenticationFilter {
 
     final Map<String, List<String>> m = new HashMap<String, List<String>>();
     for(Map.Entry<String, String[]> entry : original.entrySet()) {
-      final String key = entry.getKey().toLowerCase();
+      final String key = StringUtils.toLowerCase(entry.getKey());
       List<String> strings = m.get(key);
       if (strings == null) {
         strings = new ArrayList<String>();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
index 2ae3445..febe125 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
@@ -28,6 +28,7 @@ import com.sun.jersey.spi.container.ContainerRequest;
 import com.sun.jersey.spi.container.ContainerRequestFilter;
 import com.sun.jersey.spi.container.ContainerResponseFilter;
 import com.sun.jersey.spi.container.ResourceFilter;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * A filter to change parameter names to lower cases
@@ -75,7 +76,7 @@ public class ParamFilter implements ResourceFilter {
       final MultivaluedMap<String, String> parameters) {
     UriBuilder b = UriBuilder.fromUri(uri).replaceQuery("");
     for(Map.Entry<String, List<String>> e : parameters.entrySet()) {
-      final String key = e.getKey().toLowerCase();
+      final String key = StringUtils.toLowerCase(e.getKey());
       for(String v : e.getValue()) {
         b = b.queryParam(key, v);
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 3ca89f5..eea133b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -1243,7 +1243,7 @@ public class WebHdfsFileSystem extends FileSystem
     if (query == null) {
       return url;
     }
-    final String lower = query.toLowerCase();
+    final String lower = StringUtils.toLowerCase(query);
     if (!lower.startsWith(OFFSET_PARAM_PREFIX)
         && !lower.contains("&" + OFFSET_PARAM_PREFIX)) {
       return url;
@@ -1254,7 +1254,7 @@ public class WebHdfsFileSystem extends FileSystem
     for(final StringTokenizer st = new StringTokenizer(query, "&");
         st.hasMoreTokens();) {
       final String token = st.nextToken();
-      if (!token.toLowerCase().startsWith(OFFSET_PARAM_PREFIX)) {
+      if (!StringUtils.toLowerCase(token).startsWith(OFFSET_PARAM_PREFIX)) {
         if (b == null) {
           b = new StringBuilder("?").append(token);
         } else {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java
index 1703e3b..60d201b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.web.resources;
 
 import java.util.Arrays;
+import org.apache.hadoop.util.StringUtils;
 
 abstract class EnumParam<E extends Enum<E>> extends Param<E, EnumParam.Domain<E>> {
   EnumParam(final Domain<E> domain, final E value) {
@@ -40,7 +41,7 @@ abstract class EnumParam<E extends Enum<E>> extends Param<E, EnumParam.Domain<E>
 
     @Override
     final E parse(final String str) {
-      return Enum.valueOf(enumClass, str.toUpperCase());
+      return Enum.valueOf(enumClass, StringUtils.toUpperCase(str));
     }
   }
 }
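
Enum parsing is where the locale bug bites hardest: Java enum constants are
ASCII upper-case, and any constant containing an 'I' becomes unreachable
from lower-case input under a Turkish default locale. A quick illustration
with a made-up enum:

    import java.util.Locale;

    public class EnumParseDemo {
      enum State { INITIALIZING, RUNNING }

      public static void main(String[] args) {
        Locale.setDefault(new Locale("tr", "TR"));
        // "initializing".toUpperCase() -> "İNİTİALİZİNG" (dotted capital I),
        // which matches no constant, so this line would throw
        // IllegalArgumentException:
        //   State broken = State.valueOf("initializing".toUpperCase());
        // The pinned conversion keeps valueOf() working:
        State ok = State.valueOf("initializing".toUpperCase(Locale.ENGLISH));
        System.out.println(ok);  // INITIALIZING
      }
    }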

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java
index 5adb5a6..c2dfadf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.web.resources;
 import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.Iterator;
+import org.apache.hadoop.util.StringUtils;
 
 abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>, EnumSetParam.Domain<E>> {
   /** Convert an EnumSet to a string of comma separated values. */
@@ -82,7 +83,7 @@ abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>, EnumSet
           i = j > 0 ? j + 1 : 0;
           j = str.indexOf(',', i);
           final String sub = j >= 0? str.substring(i, j): str.substring(i);
-          set.add(Enum.valueOf(enumClass, sub.trim().toUpperCase()));
+          set.add(Enum.valueOf(enumClass, StringUtils.toUpperCase(sub.trim())));
         }
       }
       return set;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java
index ac6acf9..b439a28 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hdfs.server.namenode.snapshot;
 
 import static org.mockito.Matchers.anyObject;
-import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.spy;
@@ -31,6 +30,7 @@ import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
 import org.apache.hadoop.hdfs.server.namenode.INode;
 import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
 import org.apache.hadoop.hdfs.server.namenode.INodesInPath;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -70,7 +70,7 @@ public class TestSnapshotManager {
       Assert.fail("Expected SnapshotException not thrown");
     } catch (SnapshotException se) {
       Assert.assertTrue(
-          se.getMessage().toLowerCase().contains("rollover"));
+          StringUtils.toLowerCase(se.getMessage()).contains("rollover"));
     }
 
     // Delete a snapshot to free up a slot.
@@ -86,7 +86,7 @@ public class TestSnapshotManager {
       Assert.fail("Expected SnapshotException not thrown");
     } catch (SnapshotException se) {
       Assert.assertTrue(
-          se.getMessage().toLowerCase().contains("rollover"));
+          StringUtils.toLowerCase(se.getMessage()).contains("rollover"));
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index aad63d3..a0e7041 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -59,6 +59,7 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
 import org.apache.hadoop.yarn.client.api.TimelineClient;
@@ -711,7 +712,7 @@ public class JobHistoryEventHandler extends AbstractService
   private void processEventForTimelineServer(HistoryEvent event, JobId jobId,
           long timestamp) {
     TimelineEvent tEvent = new TimelineEvent();
-    tEvent.setEventType(event.getEventType().name().toUpperCase());
+    tEvent.setEventType(StringUtils.toUpperCase(event.getEventType().name()));
     tEvent.setTimestamp(timestamp);
     TimelineEntity tEntity = new TimelineEntity();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
index 53f21db..0f528e4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
@@ -22,7 +22,6 @@ import static org.apache.hadoop.yarn.util.StringHelper.join;
 
 import java.io.IOException;
 import java.net.URLDecoder;
-import java.util.Locale;
 
 import javax.servlet.http.HttpServletResponse;
 
@@ -226,8 +225,9 @@ public class AppController extends Controller implements AMParams {
     if (app.getJob() != null) {
       try {
         String tt = $(TASK_TYPE);
-        tt = tt.isEmpty() ? "All" : StringUtils.capitalize(MRApps.taskType(tt).
-            toString().toLowerCase(Locale.US));
+        tt = tt.isEmpty() ? "All" : StringUtils.capitalize(
+            org.apache.hadoop.util.StringUtils.toLowerCase(
+                MRApps.taskType(tt).toString()));
         setTitle(join(tt, " Tasks for ", $(JOB_ID)));
       } catch (Exception e) {
         LOG.error("Failed to render tasks page with task type : "

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
index 553ba70..5b8d3a7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
@@ -314,7 +315,7 @@ public class TypeConverter {
       QueueState state) {
     org.apache.hadoop.mapreduce.QueueState qState =
       org.apache.hadoop.mapreduce.QueueState.getState(
-        state.toString().toLowerCase());
+          StringUtils.toLowerCase(state.toString()));
     return qState;
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
index 3919c42..876e555 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
@@ -303,7 +303,7 @@ public class MRApps extends Apps {
               remoteFS.getWorkingDirectory()));
           String name = (null == u.getFragment())
               ? p.getName() : u.getFragment();
-          if (!name.toLowerCase().endsWith(".jar")) {
+          if (!StringUtils.toLowerCase(name).endsWith(".jar")) {
             linkLookup.put(p, name);
           }
         }
@@ -317,7 +317,7 @@ public class MRApps extends Apps {
         if (name == null) {
           name = p.getName();
         }
-        if(!name.toLowerCase().endsWith(".jar")) {
+        if(!StringUtils.toLowerCase(name).endsWith(".jar")) {
           MRApps.addToEnvironment(
               environment,
               classpathEnvVar,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
index cc42b9c..e36efec 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.mapreduce;
 
+import org.apache.hadoop.util.StringUtils;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
@@ -151,9 +152,10 @@ public class TestTypeConverter {
         .newRecord(org.apache.hadoop.yarn.api.records.QueueInfo.class);
     queueInfo.setQueueState(org.apache.hadoop.yarn.api.records.QueueState.STOPPED);
     org.apache.hadoop.mapreduce.QueueInfo returned =
-      TypeConverter.fromYarn(queueInfo, new Configuration());
+        TypeConverter.fromYarn(queueInfo, new Configuration());
     Assert.assertEquals("queueInfo translation didn't work.",
-      returned.getState().toString(), queueInfo.getQueueState().toString().toLowerCase());
+        returned.getState().toString(),
+        StringUtils.toLowerCase(queueInfo.getQueueState().toString()));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
index 7710ba7..1ea1666 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
@@ -115,7 +115,7 @@ abstract public class Task implements Writable, Configurable {
    * BYTES_READ counter and second one is of the BYTES_WRITTEN counter.
    */
   protected static String[] getFileSystemCounterNames(String uriScheme) {
-    String scheme = uriScheme.toUpperCase();
+    String scheme = StringUtils.toUpperCase(uriScheme);
     return new String[]{scheme+"_BYTES_READ", scheme+"_BYTES_WRITTEN"};
   }
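
Here the counter names derive straight from the URI scheme, so before the
fix a scheme containing an 'i' produced locale-dependent counter keys.
Effect of the change (getFileSystemCounterNames() is protected, shown here
for exposition only):

    // getFileSystemCounterNames("file")
    //   with the fix:  { "FILE_BYTES_READ", "FILE_BYTES_WRITTEN" } everywhere
    //   before, under tr_TR: "file".toUpperCase() -> "FİLE" (dotted capital I),
    //   yielding "FİLE_BYTES_READ" and breaking counter lookups by name.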
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
index a53b76a..e0e5b79 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
@@ -25,7 +25,6 @@ import java.util.Arrays;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ConcurrentSkipListMap;
 import java.util.Iterator;
-import java.util.Locale;
 import java.util.Map;
 
 import com.google.common.base.Joiner;
@@ -42,6 +41,7 @@ import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.FileSystemCounter;
 import org.apache.hadoop.mapreduce.util.ResourceBundles;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * An abstract class to provide common implementation of the filesystem
@@ -227,7 +227,7 @@ public abstract class FileSystemCounterGroup<C extends Counter>
   }
 
   private String checkScheme(String scheme) {
-    String fixed = scheme.toUpperCase(Locale.US);
+    String fixed = StringUtils.toUpperCase(scheme);
     String interned = schemes.putIfAbsent(fixed, fixed);
     if (schemes.size() > MAX_NUM_SCHEMES) {
       // mistakes or abuses

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
index 86a57d9..51fe69a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
@@ -470,7 +470,7 @@ public class DistributedCache {
         if (fragment == null) {
           return false;
         }
-        String lowerCaseFragment = fragment.toLowerCase();
+        String lowerCaseFragment = StringUtils.toLowerCase(fragment);
         if (fragments.contains(lowerCaseFragment)) {
           return false;
         }
@@ -485,7 +485,7 @@ public class DistributedCache {
         if (fragment == null) {
           return false;
         }
-        String lowerCaseFragment = fragment.toLowerCase();
+        String lowerCaseFragment = StringUtils.toLowerCase(fragment);
         if (fragments.contains(lowerCaseFragment)) {
           return false;
         }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
index f193374..78c3a0f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
@@ -45,6 +45,8 @@ import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.util.StringUtils;
+
 /**
 * An InputFormat that reads input data from an SQL table.
  * <p>
@@ -162,7 +164,8 @@ public class DBInputFormat<T extends DBWritable>
       this.connection = createConnection();
 
       DatabaseMetaData dbMeta = connection.getMetaData();
-      this.dbProductName = dbMeta.getDatabaseProductName().toUpperCase();
+      this.dbProductName =
+          StringUtils.toUpperCase(dbMeta.getDatabaseProductName());
     }
     catch (Exception ex) {
       throw new RuntimeException(ex);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
index 04bd867..b024cb4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
@@ -222,12 +222,14 @@ public class CLI extends Configured implements Tool {
       taskType = argv[2];
       taskState = argv[3];
       displayTasks = true;
-      if (!taskTypes.contains(taskType.toUpperCase())) {
+      if (!taskTypes.contains(
+          org.apache.hadoop.util.StringUtils.toUpperCase(taskType))) {
         System.out.println("Error: Invalid task-type: " + taskType);
         displayUsage(cmd);
         return exitCode;
       }
-      if (!taskStates.contains(taskState.toLowerCase())) {
+      if (!taskStates.contains(
+          org.apache.hadoop.util.StringUtils.toLowerCase(taskState))) {
         System.out.println("Error: Invalid task-state: " + taskState);
         displayUsage(cmd);
         return exitCode;
@@ -588,7 +590,8 @@ public class CLI extends Configured implements Tool {
    */
   protected void displayTasks(Job job, String type, String state) 
   throws IOException, InterruptedException {
-    TaskReport[] reports = job.getTaskReports(TaskType.valueOf(type.toUpperCase()));
+    TaskReport[] reports = job.getTaskReports(TaskType.valueOf(
+        org.apache.hadoop.util.StringUtils.toUpperCase(type)));
     for (TaskReport report : reports) {
       TIPStatus status = report.getCurrentStatus();
       if ((state.equalsIgnoreCase("pending") && status ==TIPStatus.PENDING) ||

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
index f85a2ee..53997e7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
@@ -155,16 +155,16 @@ public class TestDFSIO implements Tool {
     static ByteMultiple parseString(String sMultiple) {
       if(sMultiple == null || sMultiple.isEmpty()) // MB by default
         return MB;
-      String sMU = sMultiple.toUpperCase();
-      if(B.name().toUpperCase().endsWith(sMU))
+      String sMU = StringUtils.toUpperCase(sMultiple);
+      if(StringUtils.toUpperCase(B.name()).endsWith(sMU))
         return B;
-      if(KB.name().toUpperCase().endsWith(sMU))
+      if(StringUtils.toUpperCase(KB.name()).endsWith(sMU))
         return KB;
-      if(MB.name().toUpperCase().endsWith(sMU))
+      if(StringUtils.toUpperCase(MB.name()).endsWith(sMU))
         return MB;
-      if(GB.name().toUpperCase().endsWith(sMU))
+      if(StringUtils.toUpperCase(GB.name()).endsWith(sMU))
         return GB;
-      if(TB.name().toUpperCase().endsWith(sMU))
+      if(StringUtils.toUpperCase(TB.name()).endsWith(sMU))
         return TB;
       throw new IllegalArgumentException("Unsupported ByteMultiple "+sMultiple);
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
index 60c1ba6..dc78124 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.mapred.lib.LongSumReducer;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
 
 public class TestFileSystem extends TestCase {
   private static final Log LOG = FileSystem.LOG;
@@ -556,7 +557,8 @@ public class TestFileSystem extends TestCase {
   static void checkPath(MiniDFSCluster cluster, FileSystem fileSys) throws IOException {
     InetSocketAddress add = cluster.getNameNode().getNameNodeAddress();
     // Test upper/lower case
-    fileSys.checkPath(new Path("hdfs://" + add.getHostName().toUpperCase() + ":" + add.getPort()));
+    fileSys.checkPath(new Path("hdfs://"
+        + StringUtils.toUpperCase(add.getHostName()) + ":" + add.getPort()));
   }
 
   public void testFsClose() throws Exception {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java
index 0642052..57a7163 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.fs.slive;
 
+import org.apache.hadoop.util.StringUtils;
+
 /**
  * Constants used in various places in slive
  */
@@ -35,7 +37,7 @@ class Constants {
   enum Distribution {
     BEG, END, UNIFORM, MID;
     String lowerName() {
-      return this.name().toLowerCase();
+      return StringUtils.toLowerCase(this.name());
     }
   }
 
@@ -45,7 +47,7 @@ class Constants {
   enum OperationType {
     READ, APPEND, RENAME, LS, MKDIR, DELETE, CREATE, TRUNCATE;
     String lowerName() {
-      return this.name().toLowerCase();
+      return StringUtils.toLowerCase(this.name());
     }
   }
 

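The slive enums above show the most common failure mode this patch closes: an enum constant is lowercased for display or config keys and later fed back through Enum.valueOf after re-uppercasing. That round trip is only safe when both conversions use the same locale-independent mapping. A self-contained sketch reusing the Distribution names from the hunk above:

    import java.util.Locale;

    public class EnumRoundTrip {
      enum Distribution { BEG, END, UNIFORM, MID }

      public static void main(String[] args) {
        String lower = "mid"; // as produced by lowerName()
        // Locale-pinned round trip always recovers the constant.
        System.out.println(Distribution.valueOf(lower.toUpperCase(Locale.ENGLISH)));
        // With the default-locale overload on a tr_TR JVM, "mid" uppercases
        // to "MİD" and valueOf(...) throws IllegalArgumentException instead.
      }
    }
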
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java
index b4c98f7..02eca37 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.slive;
 
 import org.apache.hadoop.fs.slive.Constants.Distribution;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * This class holds the data representing what an operations distribution and
@@ -52,7 +53,7 @@ class OperationData {
       percent = (Double.parseDouble(pieces[0]) / 100.0d);
     } else if (pieces.length >= 2) {
       percent = (Double.parseDouble(pieces[0]) / 100.0d);
-      distribution = Distribution.valueOf(pieces[1].toUpperCase());
+      distribution = Distribution.valueOf(StringUtils.toUpperCase(pieces[1]));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java
index 57ef017..bca5a1c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.slive;
 
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * An operation output has the following object format whereby simple types are
@@ -67,7 +68,8 @@ class OperationOutput {
           "Invalid key format - no type seperator - " + TYPE_SEP);
     }
     try {
-      dataType = OutputType.valueOf(key.substring(0, place).toUpperCase());
+      dataType = OutputType.valueOf(
+          StringUtils.toUpperCase(key.substring(0, place)));
     } catch (Exception e) {
       throw new IllegalArgumentException(
           "Invalid key format - invalid output type", e);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
index ce1837f..97360d6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TextOutputFormat;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -157,7 +158,7 @@ public class SliveTest implements Tool {
     if (val == null) {
       return false;
     }
-    String cleanupOpt = val.toLowerCase().trim();
+    String cleanupOpt = StringUtils.toLowerCase(val).trim();
     if (cleanupOpt.equals("true") || cleanupOpt.equals("1")) {
       return true;
     } else {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java
index f155dae..0a9d0e9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -214,23 +215,25 @@ public class FileBench extends Configured implements Tool {
           if (!(fmt == Format.txt || cod == CCodec.pln)) {
             for (CType typ : ct) {
               String fn =
-                fmt.name().toUpperCase() + "_" +
-                cod.name().toUpperCase() + "_" +
-                typ.name().toUpperCase();
+                StringUtils.toUpperCase(fmt.name()) + "_" +
+                StringUtils.toUpperCase(cod.name()) + "_" +
+                StringUtils.toUpperCase(typ.name());
               typ.configure(job);
-              System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
+              System.out.print(
+                  StringUtils.toUpperCase(rwop.name()) + " " + fn + ": ");
               System.out.println(rwop.exec(fn, job) / 1000 +
                   " seconds");
             }
           } else {
             String fn =
-              fmt.name().toUpperCase() + "_" +
-              cod.name().toUpperCase();
+              StringUtils.toUpperCase(fmt.name()) + "_" +
+              StringUtils.toUpperCase(cod.name());
             Path p = new Path(root, fn);
             if (rwop == RW.r && !fs.exists(p)) {
               fn += cod.getExt();
             }
-            System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
+            System.out.print(
+                StringUtils.toUpperCase(rwop.name()) + " " + fn + ": ");
             System.out.println(rwop.exec(fn, job) / 1000 +
                 " seconds");
           }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
index 02a083b..d60905e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.After;
@@ -280,7 +281,7 @@ public class TestMapRed extends Configured implements Tool {
     public void map(WritableComparable key, Text value,
                     OutputCollector<Text, Text> output,
                     Reporter reporter) throws IOException {
-      String str = value.toString().toLowerCase();
+      String str = StringUtils.toLowerCase(value.toString());
       output.collect(new Text(str), value);
     }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
index 5850242..1ec8739 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
@@ -102,7 +102,7 @@ public class DBCountPageView extends Configured implements Tool {
   
   private void createConnection(String driverClassName
       , String url) throws Exception {
-    
+
     Class.forName(driverClassName);
     connection = DriverManager.getConnection(url);
     connection.setAutoCommit(false);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
----------------------------------------------------------------------
diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
index f342463..b6a45ec 100644
--- a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
@@ -13,6 +13,7 @@
  */
 package org.apache.hadoop.maven.plugin.versioninfo;
 
+import java.util.Locale;
 import org.apache.hadoop.maven.plugin.util.Exec;
 import org.apache.hadoop.maven.plugin.util.FileSetUtils;
 import org.apache.maven.model.FileSet;
@@ -329,7 +330,8 @@ public class VersionInfoMojo extends AbstractMojo {
       }
 
       private String normalizePath(File file) {
-        return file.getPath().toUpperCase().replaceAll("\\\\", "/");
+        return file.getPath().toUpperCase(Locale.ENGLISH)
+            .replaceAll("\\\\", "/");
       }
     });
     byte[] md5 = computeMD5(files);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
index 83c2ce5..b664fe7 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
@@ -984,8 +984,8 @@ public class AzureNativeFileSystemStore implements NativeFileSystemStore {
   private String verifyAndConvertToStandardFormat(String rawDir) throws URISyntaxException {
     URI asUri = new URI(rawDir);
     if (asUri.getAuthority() == null 
-        || asUri.getAuthority().toLowerCase(Locale.US).equalsIgnoreCase(
-        		sessionUri.getAuthority().toLowerCase(Locale.US))) {
+        || asUri.getAuthority().toLowerCase(Locale.ENGLISH).equalsIgnoreCase(
+      sessionUri.getAuthority().toLowerCase(Locale.ENGLISH))) {
       // Applies to me.
       return trim(asUri.getPath(), "/");
     } else {

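Note that this hunk keeps the existing belt-and-braces shape, lowercasing both authorities and then comparing with equalsIgnoreCase, and only swaps Locale.US for Locale.ENGLISH. Since String.equalsIgnoreCase compares per-character Unicode case mappings without consulting any locale, a simpler equivalent could drop the explicit lowering entirely; a sketch, not part of this commit:

    import java.net.URI;

    final class AuthorityCheck {
      // String.equalsIgnoreCase is locale-independent, so the toLowerCase(...)
      // calls in the hunk above are redundant protection around it.
      static boolean sameAuthority(URI a, URI b) {
        return a.getAuthority() == null
            || a.getAuthority().equalsIgnoreCase(b.getAuthority());
      }
    }
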
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
index edb6b91..20fdf11 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
 import org.apache.hadoop.tools.mapred.UniformSizeInputFormat;
 
 import com.google.common.collect.Maps;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Utility functions used in DistCp.
@@ -121,8 +122,9 @@ public class DistCpUtils {
    */
   public static Class<? extends InputFormat> getStrategy(Configuration conf,
                                                                  DistCpOptions options) {
-    String confLabel = "distcp." +
-        options.getCopyStrategy().toLowerCase(Locale.getDefault()) + ".strategy.impl";
+    String confLabel = "distcp."
+        + StringUtils.toLowerCase(options.getCopyStrategy())
+        + ".strategy" + ".impl";
     return conf.getClass(confLabel, UniformSizeInputFormat.class, InputFormat.class);
   }
 
@@ -221,7 +223,8 @@ public class DistCpUtils {
 
     final boolean preserveXAttrs = attributes.contains(FileAttribute.XATTR);
     if (preserveXAttrs || preserveRawXattrs) {
-      final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
+      final String rawNS =
+          StringUtils.toLowerCase(XAttr.NameSpace.RAW.name());
       Map<String, byte[]> srcXAttrs = srcFileStatus.getXAttrs();
       Map<String, byte[]> targetXAttrs = getXAttrs(targetFS, path);
       if (srcXAttrs != null && !srcXAttrs.equals(targetXAttrs)) {
@@ -321,7 +324,8 @@ public class DistCpUtils {
          copyListingFileStatus.setXAttrs(srcXAttrs);
       } else {
         Map<String, byte[]> trgXAttrs = Maps.newHashMap();
-        final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
+        final String rawNS =
+            StringUtils.toLowerCase(XAttr.NameSpace.RAW.name());
         for (Map.Entry<String, byte[]> ent : srcXAttrs.entrySet()) {
           final String xattrName = ent.getKey();
           if (xattrName.startsWith(rawNS)) {

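The getStrategy() change is one of the few places where the old code asked for Locale.getDefault() explicitly, which made a configuration key locale-dependent. Assuming a user-supplied strategy of "DYNAMIC" (DistCp's dynamic strategy, chosen purely for illustration), the key lookup differs as follows:

    import java.util.Locale;

    public class StrategyKeyDemo {
      public static void main(String[] args) {
        String strategy = "DYNAMIC"; // hypothetical user-supplied value
        // Old behavior, locale-dependent: on a tr_TR JVM this prints
        // "distcp.dynamıc.strategy.impl" (dotless ı, U+0131), so the lookup
        // misses the configured class and falls back to UniformSizeInputFormat.
        System.out.println("distcp."
            + strategy.toLowerCase(new Locale("tr", "TR")) + ".strategy.impl");
        // New behavior, stable on every JVM.
        System.out.println("distcp."
            + strategy.toLowerCase(Locale.ENGLISH) + ".strategy.impl");
      }
    }
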
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
index f46c421..8a6819b 100644
--- a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
+++ b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
@@ -169,7 +169,9 @@ public class DistCpV1 implements Tool {
 
     final char symbol;
 
-    private FileAttribute() {symbol = toString().toLowerCase().charAt(0);}
+    private FileAttribute() {
+      symbol = StringUtils.toLowerCase(toString()).charAt(0);
+    }
     
     static EnumSet<FileAttribute> parse(String s) {
       if (s == null || s.length() == 0) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
index 83eb947..b803538 100644
--- a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
+++ b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.mapred.gridmix.Statistics.ClusterStats;
 
 import java.util.concurrent.CountDownLatch;
 import java.io.IOException;
+import org.apache.hadoop.util.StringUtils;
 
 enum GridmixJobSubmissionPolicy {
 
@@ -84,6 +85,6 @@ enum GridmixJobSubmissionPolicy {
   public static GridmixJobSubmissionPolicy getPolicy(
     Configuration conf, GridmixJobSubmissionPolicy defaultPolicy) {
     String policy = conf.get(JOB_SUBMISSION_POLICY, defaultPolicy.name());
-    return valueOf(policy.toUpperCase());
+    return valueOf(StringUtils.toUpperCase(policy));
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java b/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java
index 7a35b46..967929b 100644
--- a/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java
+++ b/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java
@@ -27,12 +27,12 @@ import org.apache.hadoop.fs.swift.http.RestClientBindings;
 import org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem;
 import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.Test;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
-import java.util.Locale;
 
 public class TestSwiftFileSystemExtendedContract extends SwiftFileSystemBaseTest {
 
@@ -115,7 +115,7 @@ public class TestSwiftFileSystemExtendedContract extends SwiftFileSystemBaseTest
   public void testFilesystemIsCaseSensitive() throws Exception {
     String mixedCaseFilename = "/test/UPPER.TXT";
     Path upper = path(mixedCaseFilename);
-    Path lower = path(mixedCaseFilename.toLowerCase(Locale.ENGLISH));
+    Path lower = path(StringUtils.toLowerCase(mixedCaseFilename));
     assertFalse("File exists" + upper, fs.exists(upper));
     assertFalse("File exists" + lower, fs.exists(lower));
     FSDataOutputStream out = fs.create(upper);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
index 47fdb1a..c53a7c2 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
@@ -38,6 +38,7 @@ import java.util.regex.Pattern;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.util.LineReader;
@@ -319,42 +320,42 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
     }
 
     for (int i = 0; i < args.length - (inputFilename == null ? 0 : 1); ++i) {
-      if ("-h".equals(args[i].toLowerCase())
-          || "-help".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-h", args[i])
+          || StringUtils.equalsIgnoreCase("-help", args[i])) {
         usage();
         return 0;
       }
 
-      if ("-c".equals(args[i].toLowerCase())
-          || "-collect-prefixes".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-c", args[i])
+          || StringUtils.equalsIgnoreCase("-collect-prefixes", args[i])) {
         collecting = true;
         continue;
       }
 
       // these control the job digest
-      if ("-write-job-trace".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-write-job-trace", args[i])) {
         ++i;
         jobTraceFilename = new Path(args[i]);
         continue;
       }
 
-      if ("-single-line-job-traces".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-single-line-job-traces", args[i])) {
         prettyprintTrace = false;
         continue;
       }
 
-      if ("-omit-task-details".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-omit-task-details", args[i])) {
         omitTaskDetails = true;
         continue;
       }
 
-      if ("-write-topology".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-write-topology", args[i])) {
         ++i;
         topologyFilename = new Path(args[i]);
         continue;
       }
 
-      if ("-job-digest-spectra".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-job-digest-spectra", args[i])) {
         ArrayList<Integer> values = new ArrayList<Integer>();
 
         ++i;
@@ -384,13 +385,13 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
         continue;
       }
 
-      if ("-d".equals(args[i].toLowerCase())
-          || "-debug".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-d", args[i])
+          || StringUtils.equalsIgnoreCase("-debug", args[i])) {
         debug = true;
         continue;
       }
 
-      if ("-spreads".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-spreads", args[i])) {
         int min = Integer.parseInt(args[i + 1]);
         int max = Integer.parseInt(args[i + 2]);
 
@@ -404,22 +405,22 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
       }
 
       // These control log-wide CDF outputs
-      if ("-delays".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-delays", args[i])) {
         delays = true;
         continue;
       }
 
-      if ("-runtimes".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-runtimes", args[i])) {
         runtimes = true;
         continue;
       }
 
-      if ("-tasktimes".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-tasktimes", args[i])) {
         collectTaskTimes = true;
         continue;
       }
 
-      if ("-v1".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-v1", args[i])) {
         version = 1;
         continue;
       }

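The argument parsing above is rewritten differently from the rest of the patch: instead of lowercasing args[i] once per comparison, it switches to an equalsIgnoreCase helper. String.equalsIgnoreCase is already locale-independent, because it compares per-character Unicode case mappings rather than consulting the default locale, so the rewrite both removes the locale hazard and stops allocating a lowercased copy of the argument for every flag tested. The helper presumably wraps the String method; its authoritative body is in the StringUtils.java entry of the diffstat, and this is only a sketch whose null handling is an assumption:

    final class EqualsIgnoreCaseSketch {
      static boolean equalsIgnoreCase(String s1, String s2) {
        // Null-safe wrapper around the locale-independent String comparison.
        return s1 != null && s1.equalsIgnoreCase(s2);
      }
    }
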
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
index eaa9547..c5ae2fc 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
@@ -433,7 +433,7 @@ public class JobBuilder {
       return Values.SUCCESS;
     }
     
-    return Values.valueOf(name.toUpperCase());
+    return Values.valueOf(StringUtils.toUpperCase(name));
   }
 
   private void processTaskUpdatedEvent(TaskUpdatedEvent event) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
index 903d5fb..4a23fa6 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.mapreduce.jobhistory.JhCounter;
 import org.apache.hadoop.mapreduce.jobhistory.JhCounterGroup;
 import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
 
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.annotate.JsonAnySetter;
 
 /**
@@ -243,7 +244,7 @@ public class LoggedTask implements DeepCompare {
   }
 
   private static String canonicalizeCounterName(String nonCanonicalName) {
-    String result = nonCanonicalName.toLowerCase();
+    String result = StringUtils.toLowerCase(nonCanonicalName);
 
     result = result.replace(' ', '|');
     result = result.replace('-', '|');

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
index d1b365e..c21eb39 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
@@ -23,6 +23,7 @@ import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.annotate.JsonAnySetter;
 
 // HACK ALERT!!!  This "should" have have two subclasses, which might be called
@@ -611,7 +612,7 @@ public class LoggedTaskAttempt implements DeepCompare {
   }
   
   private static String canonicalizeCounterName(String nonCanonicalName) {
-    String result = nonCanonicalName.toLowerCase();
+    String result = StringUtils.toLowerCase(nonCanonicalName);
 
     result = result.replace(' ', '|');
     result = result.replace('-', '|');

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
index 98d8aa03..bc92b71 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
@@ -25,6 +25,7 @@ import java.util.*;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * This is a class used to get the current environment
@@ -43,7 +44,7 @@ public class Environment extends Properties {
     // http://lopica.sourceforge.net/os.html
     String command = null;
     String OS = System.getProperty("os.name");
-    String lowerOs = OS.toLowerCase();
+    String lowerOs = StringUtils.toLowerCase(OS);
     if (OS.indexOf("Windows") > -1) {
       command = "cmd /C set";
     } else if (lowerOs.indexOf("ix") > -1 || lowerOs.indexOf("linux") > -1

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
index de8f740..108ad0b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
@@ -36,6 +36,7 @@ import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -173,7 +174,7 @@ public class ApplicationCLI extends YarnCLI {
           if (types != null) {
             for (String type : types) {
               if (!type.trim().isEmpty()) {
-                appTypes.add(type.toUpperCase().trim());
+                appTypes.add(StringUtils.toUpperCase(type).trim());
               }
             }
           }
@@ -191,8 +192,8 @@ public class ApplicationCLI extends YarnCLI {
                   break;
                 }
                 try {
-                  appStates.add(YarnApplicationState.valueOf(state
-                      .toUpperCase().trim()));
+                  appStates.add(YarnApplicationState.valueOf(
+                      StringUtils.toUpperCase(state).trim()));
                 } catch (IllegalArgumentException ex) {
                   sysout.println("The application state " + state
                       + " is invalid.");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
index d603626..4f0ddfe 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
@@ -111,7 +111,8 @@ public class NodeCLI extends YarnCLI {
         if (types != null) {
           for (String type : types) {
             if (!type.trim().isEmpty()) {
-              nodeStates.add(NodeState.valueOf(type.trim().toUpperCase()));
+              nodeStates.add(NodeState.valueOf(
+                  org.apache.hadoop.util.StringUtils.toUpperCase(type.trim())));
             }
           }
         }

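NodeCLI is one of the call sites that spells out org.apache.hadoop.util.StringUtils in full rather than importing it. A plausible reason, stated here as an assumption rather than something visible in this hunk, is that the file already has a different StringUtils in scope, making an unqualified reference ambiguous:

    // Hypothetical illustration of the name clash a fully qualified call avoids.
    import org.apache.commons.lang.StringUtils; // assumed pre-existing import

    class QualifiedCallSketch {
      static String upper(String s) {
        // An unqualified StringUtils here would resolve to the class above.
        return org.apache.hadoop.util.StringUtils.toUpperCase(s);
      }
    }
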
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
index a8996f0..ad009d6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
@@ -26,6 +26,7 @@ import java.util.Set;
 import org.apache.commons.lang.math.LongRange;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
@@ -213,7 +214,7 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
     // Convert applicationTags to lower case and add
     this.applicationTags = new HashSet<String>();
     for (String tag : tags) {
-      this.applicationTags.add(tag.toLowerCase());
+      this.applicationTags.add(StringUtils.toLowerCase(tag));
     }
   }
 
@@ -258,7 +259,8 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
   public void setApplicationStates(Set<String> applicationStates) {
     EnumSet<YarnApplicationState> appStates = null;
     for (YarnApplicationState state : YarnApplicationState.values()) {
-      if (applicationStates.contains(state.name().toLowerCase())) {
+      if (applicationStates.contains(
+          StringUtils.toLowerCase(state.name()))) {
         if (appStates == null) {
           appStates = EnumSet.of(state);
         } else {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
index 303b437..67e3a84 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
@@ -23,6 +23,7 @@ import java.util.Set;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
@@ -291,7 +292,7 @@ extends ApplicationSubmissionContext {
     // Convert applicationTags to lower case and add
     this.applicationTags = new HashSet<String>();
     for (String tag : tags) {
-      this.applicationTags.add(tag.toLowerCase());
+      this.applicationTags.add(StringUtils.toLowerCase(tag));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java
index 870aa95..bd9c907 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java
@@ -23,7 +23,6 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URISyntaxException;
 import java.security.PrivilegedExceptionAction;
-import java.util.Locale;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
@@ -47,6 +46,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.RunJar;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.LocalResource;
 import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
 
@@ -272,7 +272,7 @@ public class FSDownload implements Callable<Path> {
   private long unpack(File localrsrc, File dst) throws IOException {
     switch (resource.getType()) {
     case ARCHIVE: {
-      String lowerDst = dst.getName().toLowerCase(Locale.ENGLISH);
+      String lowerDst = StringUtils.toLowerCase(dst.getName());
       if (lowerDst.endsWith(".jar")) {
         RunJar.unJar(localrsrc, dst);
       } else if (lowerDst.endsWith(".zip")) {
@@ -291,7 +291,7 @@ public class FSDownload implements Callable<Path> {
     }
     break;
     case PATTERN: {
-      String lowerDst = dst.getName().toLowerCase(Locale.ENGLISH);
+      String lowerDst = StringUtils.toLowerCase(dst.getName());
       if (lowerDst.endsWith(".jar")) {
         String p = resource.getPattern();
         RunJar.unJar(localrsrc, dst,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
index c848828..5acb3f3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
@@ -26,7 +26,6 @@ import java.lang.annotation.Annotation;
 import java.lang.reflect.Method;
 import java.lang.reflect.ParameterizedType;
 import java.lang.reflect.Type;
-import java.util.Locale;
 import java.util.Set;
 import java.util.regex.Pattern;
 
@@ -35,6 +34,7 @@ import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.webapp.WebAppException;
 
 import org.slf4j.Logger;
@@ -241,7 +241,7 @@ public class HamletGen {
     puts(indent, "\n",
          "private <T extends _> ", retName, "<T> ", methodName,
          "_(T e, boolean inline) {\n",
-         "  return new ", retName, "<T>(\"", retName.toLowerCase(Locale.US),
+         "  return new ", retName, "<T>(\"", StringUtils.toLowerCase(retName),
          "\", e, opt(", !endTagOptional.contains(retName), ", inline, ",
          retName.equals("PRE"), ")); }");
   }
@@ -258,7 +258,7 @@ public class HamletGen {
       puts(0, ") {");
       puts(indent,
            topMode ? "" : "  closeAttrs();\n",
-           "  return ", retName.toLowerCase(Locale.US), "_(this, ",
+           "  return ", StringUtils.toLowerCase(retName), "_" + "(this, ",
            isInline(className, retName), ");\n", "}");
     } else if (params.length == 1) {
       puts(0, "String selector) {");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
index 68dc84e..06a56d8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
@@ -88,7 +88,8 @@ public class RegistryUtils {
    * @return the converted username
    */
   public static String convertUsername(String username) {
-    String converted= username.toLowerCase(Locale.ENGLISH);
+    String converted =
+        org.apache.hadoop.util.StringUtils.toLowerCase(username);
     int atSymbol = converted.indexOf('@');
     if (atSymbol > 0) {
       converted = converted.substring(0, atSymbol);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
index 2faba5f..9edc9ab 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
@@ -31,6 +31,7 @@ import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
 import org.apache.hadoop.yarn.server.webapp.WebServices;
@@ -147,7 +148,8 @@ public class AHSWebServices extends WebServices {
     }
     Set<String> appStates = parseQueries(statesQuery, true);
     for (String appState : appStates) {
-      switch (YarnApplicationState.valueOf(appState.toUpperCase())) {
+      switch (YarnApplicationState.valueOf(
+          StringUtils.toUpperCase(appState))) {
         case FINISHED:
         case FAILED:
         case KILLED:

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
index 0907f2c..915e3f2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
@@ -52,6 +52,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineDomains;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
@@ -417,7 +418,7 @@ public class TimelineWebServices {
     String[] strs = str.split(delimiter);
     List<Field> fieldList = new ArrayList<Field>();
     for (String s : strs) {
-      s = s.trim().toUpperCase();
+      s = StringUtils.toUpperCase(s.trim());
       if (s.equals("EVENTS")) {
         fieldList.add(Field.EVENTS);
       } else if (s.equals("LASTEVENTONLY")) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
index a02b80f..909bf1d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
@@ -31,6 +31,7 @@ import javax.ws.rs.WebApplicationException;
 
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -172,9 +173,9 @@ public class WebServices {
         break;
       }
 
-      if (checkAppStates
-          && !appStates.contains(appReport.getYarnApplicationState().toString()
-            .toLowerCase())) {
+      if (checkAppStates &&
+          !appStates.contains(StringUtils.toLowerCase(
+              appReport.getYarnApplicationState().toString()))) {
         continue;
       }
       if (finalStatusQuery != null && !finalStatusQuery.isEmpty()) {
@@ -194,9 +195,9 @@ public class WebServices {
           continue;
         }
       }
-      if (checkAppTypes
-          && !appTypes.contains(appReport.getApplicationType().trim()
-            .toLowerCase())) {
+      if (checkAppTypes &&
+          !appTypes.contains(
+              StringUtils.toLowerCase(appReport.getApplicationType().trim()))) {
         continue;
       }
 
@@ -404,7 +405,8 @@ public class WebServices {
               if (isState) {
                 try {
                   // enum string is in the uppercase
-                  YarnApplicationState.valueOf(paramStr.trim().toUpperCase());
+                  YarnApplicationState.valueOf(
+                      StringUtils.toUpperCase(paramStr.trim()));
                 } catch (RuntimeException e) {
                   YarnApplicationState[] stateArray =
                       YarnApplicationState.values();
@@ -414,7 +416,7 @@ public class WebServices {
                       + allAppStates);
                 }
               }
-              params.add(paramStr.trim().toLowerCase());
+              params.add(StringUtils.toLowerCase(paramStr.trim()));
             }
           }
         }


[3/3] hadoop git commit: HADOOP-11602. Backport 'Fix toUpperCase/toLowerCase to use Locale.ENGLISH.' (ozawa)

Posted by oz...@apache.org.
HADOOP-11602. Backport 'Fix toUpperCase/toLowerCase to use Locale.ENGLISH.' (ozawa)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b46f9e72
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b46f9e72
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b46f9e72

Branch: refs/heads/branch-2
Commit: b46f9e72dbed6fd1f8cae1e12973252462d6ee15
Parents: d941631
Author: Tsuyoshi Ozawa <oz...@apache.org>
Authored: Mon Mar 9 19:53:03 2015 +0900
Committer: Tsuyoshi Ozawa <oz...@apache.org>
Committed: Mon Mar 9 19:53:55 2015 +0900

----------------------------------------------------------------------
 .../classification/tools/StabilityOptions.java  |  5 ++-
 .../AltKerberosAuthenticationHandler.java       |  6 ++-
 .../authentication/util/KerberosUtil.java       |  2 +-
 .../authentication/util/TestKerberosUtil.java   | 14 ++++---
 hadoop-common-project/hadoop-common/CHANGES.txt |  2 +
 .../org/apache/hadoop/conf/Configuration.java   |  6 +--
 .../org/apache/hadoop/crypto/CipherSuite.java   |  3 +-
 .../hadoop/crypto/key/JavaKeyStoreProvider.java |  3 +-
 .../java/org/apache/hadoop/fs/FileSystem.java   |  7 +++-
 .../java/org/apache/hadoop/fs/StorageType.java  |  3 +-
 .../apache/hadoop/fs/permission/AclEntry.java   |  5 ++-
 .../apache/hadoop/fs/shell/XAttrCommands.java   |  2 +-
 .../org/apache/hadoop/fs/shell/find/Name.java   |  5 ++-
 .../io/compress/CompressionCodecFactory.java    |  7 ++--
 .../hadoop/metrics2/impl/MetricsConfig.java     |  7 ++--
 .../hadoop/metrics2/impl/MetricsSystemImpl.java |  5 ++-
 .../hadoop/record/compiler/CGenerator.java      | 10 +++--
 .../hadoop/record/compiler/CppGenerator.java    | 10 +++--
 .../hadoop/record/compiler/generated/Rcc.java   |  5 ++-
 .../hadoop/security/SaslPropertiesResolver.java |  3 +-
 .../apache/hadoop/security/SecurityUtil.java    | 12 +++---
 .../hadoop/security/WhitelistBasedResolver.java |  3 +-
 .../security/ssl/FileBasedKeyStoresFactory.java |  4 +-
 .../apache/hadoop/security/ssl/SSLFactory.java  |  5 ++-
 .../security/ssl/SSLHostnameVerifier.java       | 10 +++--
 .../DelegationTokenAuthenticationHandler.java   |  3 +-
 .../web/DelegationTokenAuthenticator.java       |  3 +-
 .../apache/hadoop/util/ComparableVersion.java   |  3 +-
 .../org/apache/hadoop/util/StringUtils.java     | 40 +++++++++++++++++++-
 .../java/org/apache/hadoop/ipc/TestIPC.java     |  2 +-
 .../java/org/apache/hadoop/ipc/TestSaslRPC.java |  2 +-
 .../hadoop/security/TestSecurityUtil.java       |  8 ++--
 .../security/TestUserGroupInformation.java      |  5 ++-
 .../hadoop/test/TimedOutTestsListener.java      |  6 ++-
 .../org/apache/hadoop/util/TestStringUtils.java | 21 ++++++++++
 .../org/apache/hadoop/util/TestWinUtils.java    |  6 ++-
 .../java/org/apache/hadoop/nfs/NfsExports.java  |  5 ++-
 .../server/CheckUploadContentTypeFilter.java    |  4 +-
 .../hadoop/fs/http/server/FSOperations.java     |  7 +++-
 .../http/server/HttpFSParametersProvider.java   |  4 +-
 .../org/apache/hadoop/lib/server/Server.java    |  3 +-
 .../service/hadoop/FileSystemAccessService.java |  6 ++-
 .../org/apache/hadoop/lib/wsrs/EnumParam.java   |  2 +-
 .../apache/hadoop/lib/wsrs/EnumSetParam.java    |  3 +-
 .../hadoop/lib/wsrs/ParametersProvider.java     |  3 +-
 .../org/apache/hadoop/hdfs/XAttrHelper.java     | 19 ++++++----
 .../hadoop/hdfs/protocol/HdfsConstants.java     |  3 +-
 .../BlockStoragePolicySuite.java                |  4 +-
 .../hdfs/server/common/HdfsServerConstants.java |  5 ++-
 .../hdfs/server/datanode/StorageLocation.java   |  4 +-
 .../hdfs/server/namenode/FSEditLogOp.java       |  3 +-
 .../namenode/QuotaByStorageTypeEntry.java       |  4 +-
 .../hdfs/server/namenode/SecondaryNameNode.java |  2 +-
 .../org/apache/hadoop/hdfs/tools/GetConf.java   | 17 +++++----
 .../OfflineEditsVisitorFactory.java             |  7 ++--
 .../offlineImageViewer/FSImageHandler.java      |  4 +-
 .../org/apache/hadoop/hdfs/web/AuthFilter.java  |  3 +-
 .../org/apache/hadoop/hdfs/web/ParamFilter.java |  3 +-
 .../hadoop/hdfs/web/WebHdfsFileSystem.java      |  4 +-
 .../hadoop/hdfs/web/resources/EnumParam.java    |  3 +-
 .../hadoop/hdfs/web/resources/EnumSetParam.java |  3 +-
 .../namenode/snapshot/TestSnapshotManager.java  |  6 +--
 .../jobhistory/JobHistoryEventHandler.java      |  3 +-
 .../mapreduce/v2/app/webapp/AppController.java  |  6 +--
 .../apache/hadoop/mapreduce/TypeConverter.java  |  3 +-
 .../apache/hadoop/mapreduce/v2/util/MRApps.java |  4 +-
 .../hadoop/mapreduce/TestTypeConverter.java     |  6 ++-
 .../java/org/apache/hadoop/mapred/Task.java     |  2 +-
 .../counters/FileSystemCounterGroup.java        |  4 +-
 .../mapreduce/filecache/DistributedCache.java   |  4 +-
 .../hadoop/mapreduce/lib/db/DBInputFormat.java  |  5 ++-
 .../org/apache/hadoop/mapreduce/tools/CLI.java  |  9 +++--
 .../java/org/apache/hadoop/fs/TestDFSIO.java    | 12 +++---
 .../org/apache/hadoop/fs/TestFileSystem.java    |  4 +-
 .../org/apache/hadoop/fs/slive/Constants.java   |  6 ++-
 .../apache/hadoop/fs/slive/OperationData.java   |  3 +-
 .../apache/hadoop/fs/slive/OperationOutput.java |  4 +-
 .../org/apache/hadoop/fs/slive/SliveTest.java   |  3 +-
 .../java/org/apache/hadoop/io/FileBench.java    | 17 +++++----
 .../org/apache/hadoop/mapred/TestMapRed.java    |  3 +-
 .../apache/hadoop/examples/DBCountPageView.java |  2 +-
 .../plugin/versioninfo/VersionInfoMojo.java     |  4 +-
 .../fs/azure/AzureNativeFileSystemStore.java    |  4 +-
 .../apache/hadoop/tools/util/DistCpUtils.java   | 12 ++++--
 .../java/org/apache/hadoop/tools/DistCpV1.java  |  4 +-
 .../gridmix/GridmixJobSubmissionPolicy.java     |  3 +-
 .../TestSwiftFileSystemExtendedContract.java    |  4 +-
 .../hadoop/tools/rumen/HadoopLogsAnalyzer.java  | 33 ++++++++--------
 .../apache/hadoop/tools/rumen/JobBuilder.java   |  2 +-
 .../apache/hadoop/tools/rumen/LoggedTask.java   |  3 +-
 .../hadoop/tools/rumen/LoggedTaskAttempt.java   |  3 +-
 .../apache/hadoop/streaming/Environment.java    |  3 +-
 .../hadoop/yarn/client/cli/ApplicationCLI.java  |  7 ++--
 .../apache/hadoop/yarn/client/cli/NodeCLI.java  |  3 +-
 .../impl/pb/GetApplicationsRequestPBImpl.java   |  6 ++-
 .../pb/ApplicationSubmissionContextPBImpl.java  |  3 +-
 .../org/apache/hadoop/yarn/util/FSDownload.java |  6 +--
 .../hadoop/yarn/webapp/hamlet/HamletGen.java    |  6 +--
 .../registry/client/binding/RegistryUtils.java  |  3 +-
 .../webapp/AHSWebServices.java                  |  4 +-
 .../timeline/webapp/TimelineWebServices.java    |  3 +-
 .../hadoop/yarn/server/webapp/WebServices.java  | 18 +++++----
 .../server/resourcemanager/ClientRMService.java |  3 +-
 .../resource/ResourceWeights.java               |  3 +-
 .../CapacitySchedulerConfiguration.java         |  4 +-
 .../fair/FairSchedulerConfiguration.java        |  3 +-
 .../scheduler/fair/SchedulingPolicy.java        |  3 +-
 .../resourcemanager/webapp/NodesPage.java       |  2 +-
 .../resourcemanager/webapp/RMWebServices.java   | 20 ++++++----
 109 files changed, 418 insertions(+), 228 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
index dbce31e..657dbce 100644
--- a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
+++ b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
@@ -21,6 +21,7 @@ import com.sun.javadoc.DocErrorReporter;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 class StabilityOptions {
   public static final String STABLE_OPTION = "-stable";
@@ -28,7 +29,7 @@ class StabilityOptions {
   public static final String UNSTABLE_OPTION = "-unstable";
 
   public static Integer optionLength(String option) {
-    String opt = option.toLowerCase();
+    String opt = option.toLowerCase(Locale.ENGLISH);
     if (opt.equals(UNSTABLE_OPTION)) return 1;
     if (opt.equals(EVOLVING_OPTION)) return 1;
     if (opt.equals(STABLE_OPTION)) return 1;
@@ -38,7 +39,7 @@ class StabilityOptions {
   public static void validOptions(String[][] options,
       DocErrorReporter reporter) {
     for (int i = 0; i < options.length; i++) {
-      String opt = options[i][0].toLowerCase();
+      String opt = options[i][0].toLowerCase(Locale.ENGLISH);
       if (opt.equals(UNSTABLE_OPTION)) {
 	RootDocProcessor.stability = UNSTABLE_OPTION;
       } else if (opt.equals(EVOLVING_OPTION)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
index 987330f..dae3b50 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
@@ -14,6 +14,7 @@
 package org.apache.hadoop.security.authentication.server;
 
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Properties;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
@@ -68,7 +69,8 @@ public abstract class AltKerberosAuthenticationHandler
             NON_BROWSER_USER_AGENTS, NON_BROWSER_USER_AGENTS_DEFAULT)
             .split("\\W*,\\W*");
     for (int i = 0; i < nonBrowserUserAgents.length; i++) {
-        nonBrowserUserAgents[i] = nonBrowserUserAgents[i].toLowerCase();
+        nonBrowserUserAgents[i] =
+            nonBrowserUserAgents[i].toLowerCase(Locale.ENGLISH);
     }
   }
 
@@ -120,7 +122,7 @@ public abstract class AltKerberosAuthenticationHandler
     if (userAgent == null) {
       return false;
     }
-    userAgent = userAgent.toLowerCase();
+    userAgent = userAgent.toLowerCase(Locale.ENGLISH);
     boolean isBrowser = true;
     for (String nonBrowserUserAgent : nonBrowserUserAgents) {
         if (userAgent.contains(nonBrowserUserAgent)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
index 0e8d8db..f7f5f63 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
@@ -110,7 +110,7 @@ public class KerberosUtil {
     }
     // convert hostname to lowercase as kerberos does not work with hostnames
     // with uppercase characters.
-    return service + "/" + fqdn.toLowerCase(Locale.US);
+    return service + "/" + fqdn.toLowerCase(Locale.ENGLISH);
   }
 
   /**

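The lowercasing above is now pinned to Locale.ENGLISH, so the same principal is produced on every JVM regardless of its default locale. A minimal sketch of the normalization (illustrative only; the class name is hypothetical, and it mirrors the assertions in TestKerberosUtil below):

    import org.apache.hadoop.security.authentication.util.KerberosUtil;

    public class PrincipalDemo {
      public static void main(String[] args) throws Exception {
        // Mixed-case hostnames normalize to one lowercase service principal:
        System.out.println(KerberosUtil.getServicePrincipal("HTTP", "FooHost"));
        // -> HTTP/foohost
      }
    }
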
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
index b0e8f04..89e07d1 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Locale;
 import java.util.regex.Pattern;
 
 import org.apache.directory.server.kerberos.shared.keytab.Keytab;
@@ -58,24 +59,25 @@ public class TestKerberosUtil {
 
     // send null hostname
     Assert.assertEquals("When no hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, null));
     // send empty hostname
     Assert.assertEquals("When empty hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, ""));
     // send 0.0.0.0 hostname
     Assert.assertEquals("When 0.0.0.0 hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, "0.0.0.0"));
     // send uppercase hostname
     Assert.assertEquals("When uppercase hostname is sent",
-        service + "/" + testHost.toLowerCase(),
+        service + "/" + testHost.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, testHost));
     // send lowercase hostname
     Assert.assertEquals("When lowercase hostname is sent",
-        service + "/" + testHost.toLowerCase(),
-        KerberosUtil.getServicePrincipal(service, testHost.toLowerCase()));
+        service + "/" + testHost.toLowerCase(Locale.ENGLISH),
+        KerberosUtil.getServicePrincipal(
+            service, testHost.toLowerCase(Locale.ENGLISH)));
   }
   
   @Test

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 8c66fe7..11ad906 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -654,6 +654,8 @@ Release 2.7.0 - UNRELEASED
 
     HADOOP-11670. Regression: s3a auth setup broken. (Adam Budde via stevel)
 
+    HADOOP-11602. Fix toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa)
+
 Release 2.6.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

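For context on the change above: String.toLowerCase()/toUpperCase() without an explicit locale use the JVM default locale, and under tr_TR the letter 'I' does not round-trip ('I' lowercases to the dotless 'ı'), which silently breaks case-folded comparisons of ASCII identifiers. A minimal, self-contained sketch of the pitfall (the class name is illustrative, not part of the patch; the behavior is the one asserted in TestStringUtils below):

    import java.util.Locale;

    public class TurkishLocaleDemo {
      public static void main(String[] args) {
        Locale.setDefault(new Locale("tr", "TR"));
        // Default-locale case mapping is locale-sensitive:
        System.out.println("TITLE".toLowerCase());               // tıtle
        // Pinning Locale.ENGLISH keeps ASCII round-tripping stable:
        System.out.println("TITLE".toLowerCase(Locale.ENGLISH)); // title
      }
    }
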
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 3ae47e5..4cea6a4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -1434,11 +1434,9 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
       return defaultValue;
     }
 
-    valueString = valueString.toLowerCase();
-
-    if ("true".equals(valueString))
+    if (StringUtils.equalsIgnoreCase("true", valueString))
       return true;
-    else if ("false".equals(valueString))
+    else if (StringUtils.equalsIgnoreCase("false", valueString))
       return false;
     else return defaultValue;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
index c9355d7..a811aa7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.crypto;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Defines properties of a CipherSuite. Modeled after the ciphers in
@@ -97,7 +98,7 @@ public enum CipherSuite {
     String[] parts = name.split("/");
     StringBuilder suffix = new StringBuilder();
     for (String part : parts) {
-      suffix.append(".").append(part.toLowerCase());
+      suffix.append(".").append(StringUtils.toLowerCase(part));
     }
     
     return suffix.toString();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
index bfec1ef..c0d510d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.ProviderUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -422,7 +423,7 @@ public class JavaKeyStoreProvider extends KeyProvider {
   @Override
   public KeyVersion createKey(String name, byte[] material,
                                Options options) throws IOException {
-    Preconditions.checkArgument(name.equals(name.toLowerCase()),
+    Preconditions.checkArgument(name.equals(StringUtils.toLowerCase(name)),
         "Uppercase key names are unsupported: %s", name);
     writeLock.lock();
     try {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 56c7d63..d2b5744 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -65,6 +65,7 @@ import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ShutdownHookManager;
+import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -2796,8 +2797,10 @@ public abstract class FileSystem extends Configured implements Closeable {
       }
 
       Key(URI uri, Configuration conf, long unique) throws IOException {
-        scheme = uri.getScheme()==null?"":uri.getScheme().toLowerCase();
-        authority = uri.getAuthority()==null?"":uri.getAuthority().toLowerCase();
+        scheme = uri.getScheme()==null ?
+            "" : StringUtils.toLowerCase(uri.getScheme());
+        authority = uri.getAuthority()==null ?
+            "" : StringUtils.toLowerCase(uri.getAuthority());
         this.unique = unique;
         
         this.ugi = UserGroupInformation.getCurrentUser();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java
index e306502..68069d7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java
@@ -24,6 +24,7 @@ import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Defines the types of supported storage media. The default storage
@@ -78,7 +79,7 @@ public enum StorageType {
   }
 
   public static StorageType parseStorageType(String s) {
-    return StorageType.valueOf(s.toUpperCase());
+    return StorageType.valueOf(StringUtils.toUpperCase(s));
   }
 
   private static List<StorageType> getNonTransientTypes() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
index b9def64..45402f8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
@@ -106,7 +106,7 @@ public class AclEntry {
       sb.append("default:");
     }
     if (type != null) {
-      sb.append(type.toString().toLowerCase());
+      sb.append(StringUtils.toLowerCase(type.toString()));
     }
     sb.append(':');
     if (name != null) {
@@ -263,7 +263,8 @@ public class AclEntry {
 
     AclEntryType aclType = null;
     try {
-      aclType = Enum.valueOf(AclEntryType.class, split[index].toUpperCase());
+      aclType = Enum.valueOf(
+          AclEntryType.class, StringUtils.toUpperCase(split[index]));
       builder.setType(aclType);
       index++;
     } catch (IllegalArgumentException iae) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
index 4efda87..d55c80b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
@@ -79,7 +79,7 @@ class XAttrCommands extends FsCommand {
       String en = StringUtils.popOptionWithArgument("-e", args);
       if (en != null) {
         try {
-          encoding = enValueOfFunc.apply(en.toUpperCase(Locale.ENGLISH));
+          encoding = enValueOfFunc.apply(StringUtils.toUpperCase(en));
         } catch (IllegalArgumentException e) {
           throw new IllegalArgumentException(
               "Invalid/unsupported encoding option specified: " + en);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java
index 88314c6..c89daa9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java
@@ -22,6 +22,7 @@ import java.util.Deque;
 
 import org.apache.hadoop.fs.GlobPattern;
 import org.apache.hadoop.fs.shell.PathData;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Implements the -name expression for the
@@ -73,7 +74,7 @@ final class Name extends BaseExpression {
   public void prepare() throws IOException {
     String argPattern = getArgument(1);
     if (!caseSensitive) {
-      argPattern = argPattern.toLowerCase();
+      argPattern = StringUtils.toLowerCase(argPattern);
     }
     globPattern = new GlobPattern(argPattern);
   }
@@ -82,7 +83,7 @@ final class Name extends BaseExpression {
   public Result apply(PathData item, int depth) throws IOException {
     String name = getPath(item).getName();
     if (!caseSensitive) {
-      name = name.toLowerCase();
+      name = StringUtils.toLowerCase(name);
     }
     if (globPattern.matches(name)) {
       return Result.PASS;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
index 7476a15..8fff75d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * A factory that will find the correct codec for a given filename.
@@ -66,10 +67,10 @@ public class CompressionCodecFactory {
     codecsByClassName.put(codec.getClass().getCanonicalName(), codec);
 
     String codecName = codec.getClass().getSimpleName();
-    codecsByName.put(codecName.toLowerCase(), codec);
+    codecsByName.put(StringUtils.toLowerCase(codecName), codec);
     if (codecName.endsWith("Codec")) {
       codecName = codecName.substring(0, codecName.length() - "Codec".length());
-      codecsByName.put(codecName.toLowerCase(), codec);
+      codecsByName.put(StringUtils.toLowerCase(codecName), codec);
     }
   }
 
@@ -246,7 +247,7 @@ public class CompressionCodecFactory {
       if (codec == null) {
         // trying to get the codec by name in case the name was specified
         // instead a class
-        codec = codecsByName.get(codecName.toLowerCase());
+        codec = codecsByName.get(StringUtils.toLowerCase(codecName));
       }
       return codec;
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
index 1646024..b863f55 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
@@ -44,6 +44,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.MetricsFilter;
 import org.apache.hadoop.metrics2.MetricsPlugin;
 import org.apache.hadoop.metrics2.filter.GlobFilter;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Metrics configuration for MetricsSystemImpl
@@ -85,12 +86,12 @@ class MetricsConfig extends SubsetConfiguration {
   private ClassLoader pluginLoader;
 
   MetricsConfig(Configuration c, String prefix) {
-    super(c, prefix.toLowerCase(Locale.US), ".");
+    super(c, StringUtils.toLowerCase(prefix), ".");
   }
 
   static MetricsConfig create(String prefix) {
-    return loadFirst(prefix, "hadoop-metrics2-"+ prefix.toLowerCase(Locale.US)
-                     +".properties", DEFAULT_FILE_NAME);
+    return loadFirst(prefix, "hadoop-metrics2-" +
+        StringUtils.toLowerCase(prefix) + ".properties", DEFAULT_FILE_NAME);
   }
 
   static MetricsConfig create(String prefix, String... fileNames) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
index b7660e5..b7f264b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
@@ -61,6 +61,7 @@ import org.apache.hadoop.metrics2.lib.MetricsRegistry;
 import org.apache.hadoop.metrics2.lib.MetricsSourceBuilder;
 import org.apache.hadoop.metrics2.lib.MutableStat;
 import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 
 /**
@@ -620,7 +621,7 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource {
     LOG.debug("from environment variable: "+ System.getenv(MS_INIT_MODE_KEY));
     String m = System.getProperty(MS_INIT_MODE_KEY);
     String m2 = m == null ? System.getenv(MS_INIT_MODE_KEY) : m;
-    return InitMode.valueOf((m2 == null ? InitMode.NORMAL.name() : m2)
-                            .toUpperCase(Locale.US));
+    return InitMode.valueOf(
+        StringUtils.toUpperCase((m2 == null ? InitMode.NORMAL.name() : m2)));
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java
index 69ab37a..e23f353 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java
@@ -23,6 +23,7 @@ import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.Iterator;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * C Code generator front-end for Hadoop record I/O.
@@ -46,8 +47,10 @@ class CGenerator extends CodeGenerator {
     try {
       FileWriter hh = new FileWriter(name+".h");
       try {
-        hh.write("#ifndef __"+name.toUpperCase().replace('.','_')+"__\n");
-        hh.write("#define __"+name.toUpperCase().replace('.','_')+"__\n");
+        hh.write("#ifndef __"+
+            StringUtils.toUpperCase(name).replace('.','_')+"__\n");
+        hh.write("#define __"+
+            StringUtils.toUpperCase(name).replace('.','_')+"__\n");
         hh.write("#include \"recordio.h\"\n");
         for (Iterator<JFile> iter = ilist.iterator(); iter.hasNext();) {
           hh.write("#include \""+iter.next().getName()+".h\"\n");
@@ -61,7 +64,8 @@ class CGenerator extends CodeGenerator {
         }
          */
 
-        hh.write("#endif //"+name.toUpperCase().replace('.','_')+"__\n");
+        hh.write("#endif //"+
+            StringUtils.toUpperCase(name).replace('.','_')+"__\n");
       } finally {
         hh.close();
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java
index 1c97a48..081ae09 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java
@@ -23,6 +23,7 @@ import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.Iterator;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * C++ Code generator front-end for Hadoop record I/O.
@@ -49,8 +50,10 @@ class CppGenerator extends CodeGenerator {
       
       try {
         String fileName = (new File(name)).getName();
-        hh.write("#ifndef __"+fileName.toUpperCase().replace('.','_')+"__\n");
-        hh.write("#define __"+fileName.toUpperCase().replace('.','_')+"__\n");
+        hh.write("#ifndef __"+
+            StringUtils.toUpperCase(fileName).replace('.','_')+"__\n");
+        hh.write("#define __"+
+            StringUtils.toUpperCase(fileName).replace('.','_')+"__\n");
         hh.write("#include \"recordio.hh\"\n");
         hh.write("#include \"recordTypeInfo.hh\"\n");
         for (Iterator<JFile> iter = ilist.iterator(); iter.hasNext();) {
@@ -64,7 +67,8 @@ class CppGenerator extends CodeGenerator {
           iter.next().genCppCode(hh, cc, options);
         }
         
-        hh.write("#endif //"+fileName.toUpperCase().replace('.','_')+"__\n");
+        hh.write("#endif //"+
+            StringUtils.toUpperCase(fileName).replace('.','_')+"__\n");
       } finally {
         hh.close();
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java
index c4c74cd..c1a965f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java
@@ -28,6 +28,7 @@ import java.io.File;
 import java.io.FileReader;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
@@ -58,7 +59,7 @@ public class Rcc implements RccConstants {
     for (int i=0; i<args.length; i++) {
       if ("-l".equalsIgnoreCase(args[i]) ||
           "--language".equalsIgnoreCase(args[i])) {
-        language = args[i+1].toLowerCase();
+        language = StringUtils.toLowerCase(args[i+1]);
         i++;
       } else if ("-d".equalsIgnoreCase(args[i]) ||
                  "--destdir".equalsIgnoreCase(args[i])) {
@@ -69,7 +70,7 @@ public class Rcc implements RccConstants {
         if (arg.startsWith("-")) {
           arg = arg.substring(1);
         }
-        cmdargs.add(arg.toLowerCase());
+        cmdargs.add(StringUtils.toLowerCase(arg));
       } else {
         recFiles.add(args[i]);
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java
index 0b49cfb..305443c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java
@@ -66,7 +66,8 @@ public class SaslPropertiesResolver implements Configurable{
         CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION,
         QualityOfProtection.AUTHENTICATION.toString());
     for (int i=0; i < qop.length; i++) {
-      qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase(Locale.ENGLISH)).getSaslQop();
+      qop[i] = QualityOfProtection.valueOf(
+          StringUtils.toUpperCase(qop[i])).getSaslQop();
     }
     properties.put(Sasl.QOP, StringUtils.join(",", qop));
     properties.put(Sasl.SERVER_AUTH, "true");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
index 7cbee26..eddf98d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
@@ -27,7 +27,6 @@ import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.List;
-import java.util.Locale;
 import java.util.ServiceLoader;
 
 import javax.security.auth.kerberos.KerberosPrincipal;
@@ -44,6 +43,7 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
+import org.apache.hadoop.util.StringUtils;
 
 
 //this will need to be replaced someday when there is a suitable replacement
@@ -182,7 +182,8 @@ public class SecurityUtil {
     if (fqdn == null || fqdn.isEmpty() || fqdn.equals("0.0.0.0")) {
       fqdn = getLocalHostName();
     }
-    return components[0] + "/" + fqdn.toLowerCase(Locale.US) + "@" + components[2];
+    return components[0] + "/" +
+        StringUtils.toLowerCase(fqdn) + "@" + components[2];
   }
   
   static String getLocalHostName() throws UnknownHostException {
@@ -379,7 +380,7 @@ public class SecurityUtil {
       }
       host = addr.getAddress().getHostAddress();
     } else {
-      host = addr.getHostName().toLowerCase();
+      host = StringUtils.toLowerCase(addr.getHostName());
     }
     return new Text(host + ":" + addr.getPort());
   }
@@ -606,7 +607,8 @@ public class SecurityUtil {
   public static AuthenticationMethod getAuthenticationMethod(Configuration conf) {
     String value = conf.get(HADOOP_SECURITY_AUTHENTICATION, "simple");
     try {
-      return Enum.valueOf(AuthenticationMethod.class, value.toUpperCase(Locale.ENGLISH));
+      return Enum.valueOf(AuthenticationMethod.class,
+          StringUtils.toUpperCase(value));
     } catch (IllegalArgumentException iae) {
       throw new IllegalArgumentException("Invalid attribute value for " +
           HADOOP_SECURITY_AUTHENTICATION + " of " + value);
@@ -619,7 +621,7 @@ public class SecurityUtil {
       authenticationMethod = AuthenticationMethod.SIMPLE;
     }
     conf.set(HADOOP_SECURITY_AUTHENTICATION,
-             authenticationMethod.toString().toLowerCase(Locale.ENGLISH));
+        StringUtils.toLowerCase(authenticationMethod.toString()));
   }
 
   /*

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
index dc0815e..8d4df64 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
@@ -138,7 +138,8 @@ public class WhitelistBasedResolver extends SaslPropertiesResolver {
         QualityOfProtection.PRIVACY.toString());
 
     for (int i=0; i < qop.length; i++) {
-      qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase()).getSaslQop();
+      qop[i] = QualityOfProtection.valueOf(
+          StringUtils.toUpperCase(qop[i])).getSaslQop();
     }
 
     saslProps.put(Sasl.QOP, StringUtils.join(",", qop));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
index 4b81e17..609c71f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
@@ -23,6 +23,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.net.ssl.KeyManager;
 import javax.net.ssl.KeyManagerFactory;
@@ -94,7 +95,8 @@ public class FileBasedKeyStoresFactory implements KeyStoresFactory {
   @VisibleForTesting
   public static String resolvePropertyName(SSLFactory.Mode mode,
                                            String template) {
-    return MessageFormat.format(template, mode.toString().toLowerCase());
+    return MessageFormat.format(
+        template, StringUtils.toLowerCase(mode.toString()));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
index bbea33b..edec347 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
 import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import javax.net.ssl.HostnameVerifier;
@@ -137,8 +138,8 @@ public class SSLFactory implements ConnectionConfigurator {
 
   private HostnameVerifier getHostnameVerifier(Configuration conf)
       throws GeneralSecurityException, IOException {
-    return getHostnameVerifier(conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").
-        trim().toUpperCase());
+    return getHostnameVerifier(StringUtils.toUpperCase(
+        conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").trim()));
   }
 
   public static HostnameVerifier getHostnameVerifier(String verifier)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
index dd5e67b..b5ef2b2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
@@ -52,6 +52,7 @@ import javax.net.ssl.SSLSocket;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  ************************************************************************
@@ -365,7 +366,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
             buf.append('<');
             for (int i = 0; i < hosts.length; i++) {
                 String h = hosts[i];
-                h = h != null ? h.trim().toLowerCase() : "";
+                h = h != null ? StringUtils.toLowerCase(h.trim()) : "";
                 hosts[i] = h;
                 if (i > 0) {
                     buf.append('/');
@@ -406,7 +407,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
             out:
             for (Iterator<String> it = names.iterator(); it.hasNext();) {
                 // Don't trim the CN, though!
-                final String cn = it.next().toLowerCase();
+                final String cn = StringUtils.toLowerCase(it.next());
                 // Store CN in StringBuffer in case we need to report an error.
                 buf.append(" <");
                 buf.append(cn);
@@ -424,7 +425,8 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
                                      acceptableCountryWildcard(cn);
 
                 for (int i = 0; i < hosts.length; i++) {
-                    final String hostName = hosts[i].trim().toLowerCase();
+                    final String hostName =
+                        StringUtils.toLowerCase(hosts[i].trim());
                     if (doWildcard) {
                         match = hostName.endsWith(cn.substring(1));
                         if (match && strictWithSubDomains) {
@@ -479,7 +481,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
         }
 
         public static boolean isLocalhost(String host) {
-            host = host != null ? host.trim().toLowerCase() : "";
+            host = host != null ? StringUtils.toLowerCase(host.trim()) : "";
             if (host.startsWith("::1")) {
                 int x = host.lastIndexOf('%');
                 if (x >= 0) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
index c18b5d3..c498f70 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
@@ -47,6 +47,7 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.map.ObjectMapper;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -169,7 +170,7 @@ public abstract class DelegationTokenAuthenticationHandler
     boolean requestContinues = true;
     String op = ServletUtils.getParameter(request,
         KerberosDelegationTokenAuthenticator.OP_PARAM);
-    op = (op != null) ? op.toUpperCase() : null;
+    op = (op != null) ? StringUtils.toUpperCase(op) : null;
     if (DELEGATION_TOKEN_OPS.contains(op) &&
         !request.getMethod().equals("OPTIONS")) {
       KerberosDelegationTokenAuthenticator.DelegationTokenOperation dtOp =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
index d93f7ac..8a3a57f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -286,7 +287,7 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
     HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
     if (hasResponse) {
       String contentType = conn.getHeaderField(CONTENT_TYPE);
-      contentType = (contentType != null) ? contentType.toLowerCase()
+      contentType = (contentType != null) ? StringUtils.toLowerCase(contentType)
                                           : null;
       if (contentType != null &&
           contentType.contains(APPLICATION_JSON_MIME)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
index 65d85f7..9d34518 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
@@ -37,7 +37,6 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 import java.util.ListIterator;
-import java.util.Locale;
 import java.util.Properties;
 import java.util.Stack;
 
@@ -363,7 +362,7 @@ public class ComparableVersion
 
         items = new ListItem();
 
-        version = version.toLowerCase( Locale.ENGLISH );
+        version = StringUtils.toLowerCase(version);
 
         ListItem list = items;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
index ff8edc3..fc4b0ab 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.util;
 
+import com.google.common.base.Preconditions;
 import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.net.URI;
@@ -901,7 +902,7 @@ public class StringUtils {
    */
   public static String camelize(String s) {
     StringBuilder sb = new StringBuilder();
-    String[] words = split(s.toLowerCase(Locale.US), ESCAPE_CHAR, '_');
+    String[] words = split(StringUtils.toLowerCase(s), ESCAPE_CHAR, '_');
 
     for (String word : words)
       sb.append(org.apache.commons.lang.StringUtils.capitalize(word));
@@ -1032,4 +1033,41 @@ public class StringUtils {
     }
     return null;
   }
+
+  /**
+   * Converts all of the characters in the given String to lower case with
+   * Locale.ENGLISH.
+   *
+   * @param str  string to be converted
+   * @return     the string, converted to lowercase.
+   */
+  public static String toLowerCase(String str) {
+    return str.toLowerCase(Locale.ENGLISH);
+  }
+
+  /**
+   * Converts all of the characters in the given String to upper case with
+   * Locale.ENGLISH.
+   *
+   * @param str  string to be converted
+   * @return     the string, converted to uppercase.
+   */
+  public static String toUpperCase(String str) {
+    return str.toUpperCase(Locale.ENGLISH);
+  }
+
+  /**
+   * Compare strings locale-freely by using String#equalsIgnoreCase.
+   *
+   * @param s1  Non-null string to be converted
+   * @param s2  string to be converted
+   * @return     the str, converted to uppercase.
+   */
+  public static boolean equalsIgnoreCase(String s1, String s2) {
+    Preconditions.checkNotNull(s1);
+    // don't check non-null against s2 to make the semantics same as
+    // s1.equals(s2)
+    return s1.equalsIgnoreCase(s2);
+  }
+
 }

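A quick usage sketch of the helpers added above (illustrative only; the class name is hypothetical, while the StringUtils calls are the ones this patch introduces):

    import org.apache.hadoop.util.StringUtils;

    public class StringUtilsUsage {
      public static void main(String[] args) {
        // Case mapping pinned to Locale.ENGLISH, stable under tr_TR:
        System.out.println(StringUtils.toLowerCase("HDFS"));      // hdfs
        System.out.println(StringUtils.toUpperCase("privacy"));   // PRIVACY
        // Matches s1.equalsIgnoreCase(s2): the first argument must be
        // non-null (checked with Preconditions), the second may be null.
        System.out.println(StringUtils.equalsIgnoreCase("true", "TRUE")); // true
      }
    }
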
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
index eb19f48..b443011 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
@@ -1296,7 +1296,7 @@ public class TestIPC {
     
     StringBuilder hexString = new StringBuilder();
     
-    for (String line : hexdump.toUpperCase().split("\n")) {
+    for (String line : StringUtils.toUpperCase(hexdump).split("\n")) {
       hexString.append(line.substring(0, LAST_HEX_COL).replace(" ", ""));
     }
     return StringUtils.hexStringToByte(hexString.toString());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
index 903990b..f6ab380 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
@@ -181,7 +181,7 @@ public class TestSaslRPC {
     StringBuilder sb = new StringBuilder();
     int i = 0;
     for (QualityOfProtection qop:qops){
-     sb.append(qop.name().toLowerCase());
+     sb.append(org.apache.hadoop.util.StringUtils.toLowerCase(qop.name()));
      if (++i < qops.length){
        sb.append(",");
      }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
index 3124ebd..f8f982d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.mockito.Mockito;
@@ -102,13 +103,14 @@ public class TestSecurityUtil {
     String realm = "@REALM";
     String principalInConf = service + SecurityUtil.HOSTNAME_PATTERN + realm;
     String hostname = "FooHost";
-    String principal = service + hostname.toLowerCase() + realm;
+    String principal =
+        service + StringUtils.toLowerCase(hostname) + realm;
     verify(principalInConf, hostname, principal);
   }
 
   @Test
   public void testLocalHostNameForNullOrWild() throws Exception {
-    String local = SecurityUtil.getLocalHostName();
+    String local = StringUtils.toLowerCase(SecurityUtil.getLocalHostName());
     assertEquals("hdfs/" + local + "@REALM",
                  SecurityUtil.getServerPrincipal("hdfs/_HOST@REALM", (String)null));
     assertEquals("hdfs/" + local + "@REALM",
@@ -259,7 +261,7 @@ public class TestSecurityUtil {
     //LOG.info("address:"+addr+" host:"+host+" ip:"+ip+" port:"+port);
 
     SecurityUtil.setTokenServiceUseIp(useIp);
-    String serviceHost = useIp ? ip : host.toLowerCase();
+    String serviceHost = useIp ? ip : StringUtils.toLowerCase(host);
     
     Token<?> token = new Token<TokenIdentifier>();
     Text service = new Text(serviceHost+":"+port);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
index f1fba03..e9802f6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.*;
 
 import javax.security.auth.Subject;
@@ -212,7 +213,7 @@ public class TestUserGroupInformation {
         userName = userName.substring(sp + 1);
       }
       // user names are case insensitive on Windows. Make consistent
-      userName = userName.toLowerCase();
+      userName = StringUtils.toLowerCase(userName);
     }
     // get the groups
     pp = Runtime.getRuntime().exec(Shell.WINDOWS ?
@@ -232,7 +233,7 @@ public class TestUserGroupInformation {
     String loginUserName = login.getShortUserName();
     if(Shell.WINDOWS) {
       // user names are case insensitive on Windows. Make consistent
-      loginUserName = loginUserName.toLowerCase();
+      loginUserName = StringUtils.toLowerCase(loginUserName);
     }
     assertEquals(userName, loginUserName);
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
index 220ab1d..1bdeddb 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
@@ -29,6 +29,7 @@ import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.Map;
 
+import org.apache.hadoop.util.StringUtils;
 import org.junit.runner.notification.Failure;
 import org.junit.runner.notification.RunListener;
 
@@ -93,8 +94,9 @@ public class TimedOutTestsListener extends RunListener {
           thread.getPriority(),
           thread.getId(),
           Thread.State.WAITING.equals(thread.getState()) ? 
-              "in Object.wait()" : thread.getState().name().toLowerCase(),
-          Thread.State.WAITING.equals(thread.getState()) ? 
+              "in Object.wait()" :
+              StringUtils.toLowerCase(thread.getState().name()),
+          Thread.State.WAITING.equals(thread.getState()) ?
               "WAITING (on object monitor)" : thread.getState()));
       for (StackTraceElement stackTraceElement : e.getValue()) {
         dump.append("\n        at ");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
index 0c930d4..515c3e0 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
@@ -18,10 +18,12 @@
 
 package org.apache.hadoop.util;
 
+import java.util.Locale;
 import static org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.long2String;
 import static org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.string2long;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -412,6 +414,25 @@ public class TestStringUtils extends UnitTestcaseTimeLimit {
     assertTrue(col.containsAll(Arrays.asList(new String[]{"foo","bar","baz","blah"})));
   }
 
+  @Test
+  public void testLowerAndUpperStrings() {
+    Locale defaultLocale = Locale.getDefault();
+    try {
+      Locale.setDefault(new Locale("tr", "TR"));
+      String upperStr = "TITLE";
+      String lowerStr = "title";
+      // Confirming TR locale.
+      assertNotEquals(lowerStr, upperStr.toLowerCase());
+      assertNotEquals(upperStr, lowerStr.toUpperCase());
+      // This should be true regardless of locale.
+      assertEquals(lowerStr, StringUtils.toLowerCase(upperStr));
+      assertEquals(upperStr, StringUtils.toUpperCase(lowerStr));
+      assertTrue(StringUtils.equalsIgnoreCase(upperStr, lowerStr));
+    } finally {
+      Locale.setDefault(defaultLocale);
+    }
+  }
+
   // Benchmark for StringUtils split
   public static void main(String []args) {
     final String TO_SPLIT = "foo,bar,baz,blah,blah";
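
The test above first confirms the default locale really is tr_TR, then asserts the helpers are immune to it. The StringUtils additions themselves are not shown in this part of the diff; in essence they presumably pin the conversion to a fixed locale, along the lines of this sketch (class name hypothetical):

import java.util.Locale;

public final class LocaleSafeStrings {
  public static String toLowerCase(String str) {
    return str.toLowerCase(Locale.ENGLISH);
  }
  public static String toUpperCase(String str) {
    return str.toUpperCase(Locale.ENGLISH);
  }
  public static boolean equalsIgnoreCase(String s1, String s2) {
    // String.equalsIgnoreCase compares character-by-character and does
    // not consult the default locale, so delegating to it is safe.
    return s1.equalsIgnoreCase(s2);
  }
}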

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
index d09c4de..987c706 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
@@ -382,8 +382,10 @@ public class TestWinUtils {
   private void assertOwners(File file, String expectedUser,
       String expectedGroup) throws IOException {
     String [] args = lsF(file).trim().split("[\\|]");
-    assertEquals(expectedUser.toLowerCase(), args[2].toLowerCase());
-    assertEquals(expectedGroup.toLowerCase(), args[3].toLowerCase());
+    assertEquals(StringUtils.toLowerCase(expectedUser),
+        StringUtils.toLowerCase(args[2]));
+    assertEquals(StringUtils.toLowerCase(expectedGroup),
+        StringUtils.toLowerCase(args[3]));
   }
 
   @Test (timeout = 30000)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
index b617ae5..8b6b46a 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
 import org.apache.hadoop.util.LightWeightCache;
 import org.apache.hadoop.util.LightWeightGSet;
 import org.apache.hadoop.util.LightWeightGSet.LinkedElement;
+import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.base.Preconditions;
 
@@ -359,10 +360,10 @@ public class NfsExports {
     AccessPrivilege privilege = AccessPrivilege.READ_ONLY;
     switch (parts.length) {
     case 1:
-      host = parts[0].toLowerCase().trim();
+      host = StringUtils.toLowerCase(parts[0]).trim();
       break;
     case 2:
-      host = parts[0].toLowerCase().trim();
+      host = StringUtils.toLowerCase(parts[0]).trim();
       String option = parts[1].trim();
       if ("rw".equalsIgnoreCase(option)) {
         privilege = AccessPrivilege.READ_WRITE;
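
Export host matching is a spot where the default-locale form can cause a real mismatch: a hostname spelled with a capital I no longer lower-cases to its ASCII form under tr_TR. A hypothetical illustration (hostnames invented):

import java.util.Locale;

public class HostMatchDemo {
  public static void main(String[] args) {
    Locale.setDefault(new Locale("tr", "TR"));
    String export = "ingest1.example.com";
    String client = "INGEST1.EXAMPLE.COM";
    System.out.println(export.equals(client.toLowerCase()));
    // false: tr_TR yields "ıngest1.example.com" (dotless ı)
    System.out.println(export.equals(client.toLowerCase(Locale.ENGLISH)));
    // true
  }
}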

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
index 836b4ce..81b0b7a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.fs.http.server;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -82,7 +83,8 @@ public class CheckUploadContentTypeFilter implements Filter {
     String method = httpReq.getMethod();
     if (method.equals("PUT") || method.equals("POST")) {
       String op = httpReq.getParameter(HttpFSFileSystem.OP_PARAM);
-      if (op != null && UPLOAD_OPERATIONS.contains(op.toUpperCase())) {
+      if (op != null && UPLOAD_OPERATIONS.contains(
+          StringUtils.toUpperCase(op))) {
         if ("true".equalsIgnoreCase(httpReq.getParameter(HttpFSParametersProvider.DataParam.NAME))) {
           String contentType = httpReq.getContentType();
           contentTypeOK =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
index 633589c..11cdb4d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.protocol.AclException;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.lib.service.FileSystemAccess;
+import org.apache.hadoop.util.StringUtils;
 import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
 
@@ -439,7 +440,8 @@ public class FSOperations {
     @Override
     public JSONObject execute(FileSystem fs) throws IOException {
       boolean result = fs.truncate(path, newLength);
-      return toJSON(HttpFSFileSystem.TRUNCATE_JSON.toLowerCase(), result);
+      return toJSON(
+          StringUtils.toLowerCase(HttpFSFileSystem.TRUNCATE_JSON), result);
     }
 
   }
@@ -568,7 +570,8 @@ public class FSOperations {
     @Override
     public JSONObject execute(FileSystem fs) throws IOException {
       boolean deleted = fs.delete(path, recursive);
-      return toJSON(HttpFSFileSystem.DELETE_JSON.toLowerCase(), deleted);
+      return toJSON(
+          StringUtils.toLowerCase(HttpFSFileSystem.DELETE_JSON), deleted);
     }
 
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
index 271f3d9..5c4204a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.lib.wsrs.Param;
 import org.apache.hadoop.lib.wsrs.ParametersProvider;
 import org.apache.hadoop.lib.wsrs.ShortParam;
 import org.apache.hadoop.lib.wsrs.StringParam;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.ws.rs.ext.Provider;
 import java.util.HashMap;
@@ -168,7 +169,8 @@ public class HttpFSParametersProvider extends ParametersProvider {
      */
     public OperationParam(String operation) {
       super(NAME, HttpFSFileSystem.Operation.class,
-            HttpFSFileSystem.Operation.valueOf(operation.toUpperCase()));
+            HttpFSFileSystem.Operation.valueOf(
+                StringUtils.toUpperCase(operation)));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
index 5c1bb4f..1a0f9ff 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.lib.util.Check;
 import org.apache.hadoop.lib.util.ConfigurationUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.PropertyConfigurator;
 import org.slf4j.Logger;
@@ -202,7 +203,7 @@ public class Server {
    * @param config server configuration.
    */
   public Server(String name, String homeDir, String configDir, String logDir, String tempDir, Configuration config) {
-    this.name = Check.notEmpty(name, "name").trim().toLowerCase();
+    this.name = StringUtils.toLowerCase(Check.notEmpty(name, "name").trim());
     this.homeDir = Check.notEmpty(homeDir, "homeDir");
     this.configDir = Check.notEmpty(configDir, "configDir");
     this.logDir = Check.notEmpty(logDir, "logDir");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
index ccb15a3..88780cb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.lib.service.Scheduler;
 import org.apache.hadoop.lib.util.Check;
 import org.apache.hadoop.lib.util.ConfigurationUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.VersionInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -254,7 +255,7 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
   private Set<String> toLowerCase(Collection<String> collection) {
     Set<String> set = new HashSet<String>();
     for (String value : collection) {
-      set.add(value.toLowerCase());
+      set.add(StringUtils.toLowerCase(value));
     }
     return set;
   }
@@ -300,7 +301,8 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
 
   protected void validateNamenode(String namenode) throws FileSystemAccessException {
     if (nameNodeWhitelist.size() > 0 && !nameNodeWhitelist.contains("*")) {
-      if (!nameNodeWhitelist.contains(namenode.toLowerCase())) {
+      if (!nameNodeWhitelist.contains(
+          StringUtils.toLowerCase(namenode))) {
         throw new FileSystemAccessException(FileSystemAccessException.ERROR.H05, namenode, "not in whitelist");
       }
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
index 8baef67..f95a6e6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
@@ -34,7 +34,7 @@ public abstract class EnumParam<E extends Enum<E>> extends Param<E> {
 
   @Override
   protected E parse(String str) throws Exception {
-    return Enum.valueOf(klass, str.toUpperCase());
+    return Enum.valueOf(klass, StringUtils.toUpperCase(str));
   }
 
   @Override
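
Enum.valueOf is the classic failure mode for this bug: under tr_TR, up-casing a lower-case operation string produces the dotted capital İ (U+0130), which matches no constant name, so parsing throws. A sketch with a made-up enum:

import java.util.Locale;

public class EnumParseDemo {
  enum Op { LISTSTATUS, DELETE }  // hypothetical operation enum

  public static void main(String[] args) {
    Locale.setDefault(new Locale("tr", "TR"));
    System.out.println(Enum.valueOf(Op.class,
        "liststatus".toUpperCase(Locale.ENGLISH)));  // LISTSTATUS
    System.out.println(Enum.valueOf(Op.class,
        "liststatus".toUpperCase()));
    // throws IllegalArgumentException: "LİSTSTATUS" is not a constant
  }
}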

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java
index 8d79b71..ba6e5aa 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java
@@ -22,6 +22,7 @@ import java.util.EnumSet;
 import java.util.Iterator;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 
 @InterfaceAudience.Private
 public abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>> {
@@ -37,7 +38,7 @@ public abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>>
     final EnumSet<E> set = EnumSet.noneOf(klass);
     if (!str.isEmpty()) {
       for (String sub : str.split(",")) {
-        set.add(Enum.valueOf(klass, sub.trim().toUpperCase()));
+        set.add(Enum.valueOf(klass, StringUtils.toUpperCase(sub.trim())));
       }
     }
     return set;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
index 4703a90..c93f8f2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
@@ -26,6 +26,7 @@ import com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable;
 import com.sun.jersey.spi.inject.Injectable;
 import com.sun.jersey.spi.inject.InjectableProvider;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MultivaluedMap;
@@ -70,7 +71,7 @@ public class ParametersProvider
     }
     Enum op;
     try {
-      op = Enum.valueOf(enumClass, str.toUpperCase());
+      op = Enum.valueOf(enumClass, StringUtils.toUpperCase(str));
     } catch (IllegalArgumentException ex) {
       throw new IllegalArgumentException(
         MessageFormat.format("Invalid Operation [{0}]", str));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java
index 04364ccf..5cafb3c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.XAttr.NameSpace;
+import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -57,16 +58,20 @@ public class XAttrHelper {
     }
     
     NameSpace ns;
-    final String prefix = name.substring(0, prefixIndex).toLowerCase();
-    if (prefix.equals(NameSpace.USER.toString().toLowerCase())) {
+    final String prefix = name.substring(0, prefixIndex);
+    if (StringUtils.equalsIgnoreCase(prefix, NameSpace.USER.toString())) {
       ns = NameSpace.USER;
-    } else if (prefix.equals(NameSpace.TRUSTED.toString().toLowerCase())) {
+    } else if (
+        StringUtils.equalsIgnoreCase(prefix, NameSpace.TRUSTED.toString())) {
       ns = NameSpace.TRUSTED;
-    } else if (prefix.equals(NameSpace.SYSTEM.toString().toLowerCase())) {
+    } else if (
+        StringUtils.equalsIgnoreCase(prefix, NameSpace.SYSTEM.toString())) {
       ns = NameSpace.SYSTEM;
-    } else if (prefix.equals(NameSpace.SECURITY.toString().toLowerCase())) {
+    } else if (
+        StringUtils.equalsIgnoreCase(prefix, NameSpace.SECURITY.toString())) {
       ns = NameSpace.SECURITY;
-    } else if (prefix.equals(NameSpace.RAW.toString().toLowerCase())) {
+    } else if (
+        StringUtils.equalsIgnoreCase(prefix, NameSpace.RAW.toString())) {
       ns = NameSpace.RAW;
     } else {
       throw new HadoopIllegalArgumentException("An XAttr name must be " +
@@ -145,7 +150,7 @@ public class XAttrHelper {
     }
     
     String namespace = xAttr.getNameSpace().toString();
-    return namespace.toLowerCase() + "." + xAttr.getName();
+    return StringUtils.toLowerCase(namespace) + "." + xAttr.getName();
   }
 
   /**
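
Switching from paired toLowerCase() calls to equalsIgnoreCase() also fixes a user-visible rejection: NameSpace.SECURITY.toString().toLowerCase() comes out as "securıty" under tr_TR, so even a correctly spelled "security.key" attribute would fail the old prefix check. An illustration:

import java.util.Locale;

public class XAttrPrefixDemo {
  public static void main(String[] args) {
    Locale.setDefault(new Locale("tr", "TR"));
    String prefix = "security";  // as parsed from "security.key"
    System.out.println(prefix.equals("SECURITY".toLowerCase()));
    // false: "SECURITY" lower-cases to "securıty" under tr_TR
    System.out.println(prefix.equalsIgnoreCase("SECURITY"));
    // true: equalsIgnoreCase ignores the default locale
  }
}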

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
index ecd7a43..4e95329 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeLayoutVersion;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion;
+import org.apache.hadoop.util.StringUtils;
 
 /************************************
  * Some handy constants
@@ -100,7 +101,7 @@ public class HdfsConstants {
 
     /** Convert the given String to a RollingUpgradeAction. */
     public static RollingUpgradeAction fromString(String s) {
-      return MAP.get(s.toUpperCase());
+      return MAP.get(StringUtils.toUpperCase(s));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java
index 39a4076..6fee219 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
+import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -131,7 +132,8 @@ public class BlockStoragePolicySuite {
   }
 
   public static String buildXAttrName() {
-    return XAttrNS.toString().toLowerCase() + "." + STORAGE_POLICY_XATTR_NAME;
+    return StringUtils.toLowerCase(XAttrNS.toString())
+        + "." + STORAGE_POLICY_XATTR_NAME;
   }
 
   public static XAttr buildXAttr(byte policyId) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
index 9bba2c9..8af3af7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.namenode.MetaRecoveryContext;
 
 import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.StringUtils;
 
 /************************************
  * Some handy internal HDFS constants
@@ -53,7 +54,7 @@ public final class HdfsServerConstants {
 
     public String getOptionString() {
       return StartupOption.ROLLINGUPGRADE.getName() + " "
-          + name().toLowerCase();
+          + StringUtils.toLowerCase(name());
     }
 
     public boolean matches(StartupOption option) {
@@ -76,7 +77,7 @@ public final class HdfsServerConstants {
     public static String getAllOptionString() {
       final StringBuilder b = new StringBuilder("<");
       for(RollingUpgradeStartupOption opt : VALUES) {
-        b.append(opt.name().toLowerCase()).append("|");
+        b.append(StringUtils.toLowerCase(opt.name())).append("|");
       }
       b.setCharAt(b.length() - 1, '>');
       return b.toString();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
index 7cda670..126086f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
@@ -28,6 +28,7 @@ import java.util.regex.Matcher;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.server.common.Util;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Encapsulates the URI and storage medium that together describe a
@@ -88,7 +89,8 @@ public class StorageLocation {
       String classString = matcher.group(1);
       location = matcher.group(2);
       if (!classString.isEmpty()) {
-        storageType = StorageType.valueOf(classString.toUpperCase());
+        storageType =
+            StorageType.valueOf(StringUtils.toUpperCase(classString));
       }
     }
 

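Storage-location tags are parsed case-insensitively, and "disk" is exactly the kind of token that breaks: up-cased under tr_TR it becomes "DİSK", and StorageType.valueOf() would throw before this fix. A sketch (stand-in enum, not the HDFS one):

import java.util.Locale;

public class StorageTagDemo {
  enum StorageType { DISK, SSD }  // stand-in for the HDFS StorageType

  public static void main(String[] args) {
    Locale.setDefault(new Locale("tr", "TR"));
    System.out.println(StorageType.valueOf(
        "disk".toUpperCase(Locale.ENGLISH)));      // DISK
    System.out.println("disk".toUpperCase());
    // prints "DİSK"; StorageType.valueOf() would throw on it
  }
}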
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
index 7a62663..b75dee3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
@@ -123,6 +123,7 @@ import org.apache.hadoop.ipc.ClientId;
 import org.apache.hadoop.ipc.RpcConstants;
 import org.apache.hadoop.security.token.delegation.DelegationKey;
 import org.apache.hadoop.util.DataChecksum;
+import org.apache.hadoop.util.StringUtils;
 import org.xml.sax.ContentHandler;
 import org.xml.sax.SAXException;
 import org.xml.sax.helpers.AttributesImpl;
@@ -4350,7 +4351,7 @@ public abstract class FSEditLogOp {
 
     public RollingUpgradeOp(FSEditLogOpCodes code, String name) {
       super(code);
-      this.name = name.toUpperCase();
+      this.name = StringUtils.toUpperCase(name);
     }
 
     static RollingUpgradeOp getStartInstance(OpInstanceCache cache) {
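
Any of these call sites can be guarded the same way testLowerAndUpperStrings() does it: flip the default locale to tr_TR, exercise the code, and restore the locale in a finally block. A sketch of such a regression test (JUnit 4 style, as in the test file above; class name and assertion illustrative):

import java.util.Locale;
import org.apache.hadoop.util.StringUtils;
import org.junit.Test;
import static org.junit.Assert.assertEquals;

public class TestLocaleRegressions {  // hypothetical test class
  @Test
  public void testUpperCaseUnderTurkishLocale() {
    Locale defaultLocale = Locale.getDefault();
    try {
      Locale.setDefault(new Locale("tr", "TR"));
      // Must hold regardless of the JVM default locale.
      assertEquals("ROLLINGUPGRADE",
          StringUtils.toUpperCase("rollingupgrade"));
    } finally {
      Locale.setDefault(defaultLocale);
    }
  }
}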