Posted to common-commits@hadoop.apache.org by oz...@apache.org on 2015/03/09 11:55:24 UTC
[2/3] hadoop git commit: HADOOP-11602. Backport 'Fix toUpperCase/toLowerCase to use Locale.ENGLISH.' (ozawa)
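Background for the pattern repeated in every hunk below: String.toUpperCase() and String.toLowerCase() with no argument use the JVM's default locale, and under locales with special case rules (Turkish is the classic one, where 'i' and 'I' are not a case pair) config keys, enum names and URI schemes stop round-tripping. A minimal sketch of the failure, with the org.apache.hadoop.util.StringUtils helper bodies assumed from the JIRA title rather than copied from hadoop-common:

    import java.util.Locale;

    public class LocaleCaseDemo {
      // Assumed shape of the helpers this patch switches to; the JIRA
      // title says they pin Locale.ENGLISH.
      static String toLowerCase(String str) {
        return str.toLowerCase(Locale.ENGLISH);
      }
      static String toUpperCase(String str) {
        return str.toUpperCase(Locale.ENGLISH);
      }

      public static void main(String[] args) {
        Locale.setDefault(new Locale("tr", "TR")); // e.g. a Turkish-locale JVM
        // Default-locale mapping: 'I' lower-cases to dotless 'ı' (U+0131),
        // so the StorageType name "DISK" no longer yields the key "disk".
        System.out.println("DISK".toLowerCase());   // dısk
        System.out.println(toLowerCase("DISK"));    // disk
      }
    }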
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
index ddd8a1a..cf59c9d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.hdfs.server.namenode;
import com.google.common.base.Objects;
import org.apache.hadoop.fs.StorageType;
-import java.util.Locale;
+import org.apache.hadoop.util.StringUtils;
public class QuotaByStorageTypeEntry {
private StorageType type;
@@ -54,7 +54,7 @@ import java.util.Locale;
public String toString() {
StringBuilder sb = new StringBuilder();
assert (type != null);
- sb.append(type.toString().toLowerCase());
+ sb.append(StringUtils.toLowerCase(type.toString()));
sb.append(':');
sb.append(quota);
return sb.toString();
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
index 83e6426..ec7e0c9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
@@ -587,7 +587,7 @@ public class SecondaryNameNode implements Runnable,
return 0;
}
- String cmd = opts.getCommand().toString().toLowerCase();
+ String cmd = StringUtils.toLowerCase(opts.getCommand().toString());
int exitCode = 0;
try {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
index 92a16cd..e6cf16c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
@@ -79,19 +80,19 @@ public class GetConf extends Configured implements Tool {
private static final Map<String, CommandHandler> map;
static {
map = new HashMap<String, CommandHandler>();
- map.put(NAMENODE.getName().toLowerCase(),
+ map.put(StringUtils.toLowerCase(NAMENODE.getName()),
new NameNodesCommandHandler());
- map.put(SECONDARY.getName().toLowerCase(),
+ map.put(StringUtils.toLowerCase(SECONDARY.getName()),
new SecondaryNameNodesCommandHandler());
- map.put(BACKUP.getName().toLowerCase(),
+ map.put(StringUtils.toLowerCase(BACKUP.getName()),
new BackupNodesCommandHandler());
- map.put(INCLUDE_FILE.getName().toLowerCase(),
+ map.put(StringUtils.toLowerCase(INCLUDE_FILE.getName()),
new CommandHandler(DFSConfigKeys.DFS_HOSTS));
- map.put(EXCLUDE_FILE.getName().toLowerCase(),
+ map.put(StringUtils.toLowerCase(EXCLUDE_FILE.getName()),
new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
- map.put(NNRPCADDRESSES.getName().toLowerCase(),
+ map.put(StringUtils.toLowerCase(NNRPCADDRESSES.getName()),
new NNRpcAddressesCommandHandler());
- map.put(CONFKEY.getName().toLowerCase(),
+ map.put(StringUtils.toLowerCase(CONFKEY.getName()),
new PrintConfKeyCommandHandler());
}
@@ -116,7 +117,7 @@ public class GetConf extends Configured implements Tool {
}
public static CommandHandler getHandler(String cmd) {
- return map.get(cmd.toLowerCase());
+ return map.get(StringUtils.toLowerCase(cmd));
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
index c4b8424..de3aceb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
@@ -24,6 +24,7 @@ import java.io.OutputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.StringUtils;
/**
* EditsVisitorFactory for different implementations of EditsVisitor
@@ -43,7 +44,7 @@ public class OfflineEditsVisitorFactory {
*/
static public OfflineEditsVisitor getEditsVisitor(String filename,
String processor, boolean printToScreen) throws IOException {
- if(processor.toLowerCase().equals("binary")) {
+ if(StringUtils.equalsIgnoreCase("binary", processor)) {
return new BinaryEditsVisitor(filename);
}
OfflineEditsVisitor vis;
@@ -59,9 +60,9 @@ public class OfflineEditsVisitorFactory {
outs[1] = System.out;
out = new TeeOutputStream(outs);
}
- if(processor.toLowerCase().equals("xml")) {
+ if(StringUtils.equalsIgnoreCase("xml", processor)) {
vis = new XmlEditsVisitor(out);
- } else if(processor.toLowerCase().equals("stats")) {
+ } else if(StringUtils.equalsIgnoreCase("stats", processor)) {
vis = new StatisticsEditsVisitor(out);
} else {
throw new IOException("Unknown proccesor " + processor +
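A note on the equalsIgnoreCase rewrites in this hunk: String.equalsIgnoreCase compares characters through the locale-independent Character.toUpperCase/toLowerCase mappings, so it avoids the default-locale trap without allocating a lowered copy of the string. The helper presumably just delegates; a sketch (assumed body, not copied from the patch):

    // Assumed body of org.apache.hadoop.util.StringUtils.equalsIgnoreCase.
    static boolean equalsIgnoreCase(String s1, String s2) {
      // Locale-independent, unlike s1.toLowerCase().equals("xml");
      // with the literal passed as s1, a null s2 compares false
      // instead of throwing.
      return s1.equalsIgnoreCase(s2);
    }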
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
index 43fcd69..429b6fc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
@@ -33,6 +33,7 @@ import io.netty.handler.codec.http.QueryStringDecoder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.util.StringUtils;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -51,6 +52,7 @@ import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.APPLICATION_JSON_UTF8;
import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.WEBHDFS_PREFIX;
import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.WEBHDFS_PREFIX_LENGTH;
+
/**
* Implement the read-only WebHDFS API for fsimage.
*/
@@ -141,7 +143,7 @@ class FSImageHandler extends SimpleChannelInboundHandler<HttpRequest> {
private static String getOp(QueryStringDecoder decoder) {
Map<String, List<String>> parameters = decoder.parameters();
return parameters.containsKey("op")
- ? parameters.get("op").get(0).toUpperCase() : null;
+ ? StringUtils.toUpperCase(parameters.get("op").get(0)) : null;
}
private static String getPath(QueryStringDecoder decoder)
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
index b6ff4b6..5ad1f24 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+import org.apache.hadoop.util.StringUtils;
/**
* Subclass of {@link AuthenticationFilter} that
@@ -96,7 +97,7 @@ public class AuthFilter extends AuthenticationFilter {
final Map<String, List<String>> m = new HashMap<String, List<String>>();
for(Map.Entry<String, String[]> entry : original.entrySet()) {
- final String key = entry.getKey().toLowerCase();
+ final String key = StringUtils.toLowerCase(entry.getKey());
List<String> strings = m.get(key);
if (strings == null) {
strings = new ArrayList<String>();
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
index 2ae3445..febe125 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
@@ -28,6 +28,7 @@ import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
+import org.apache.hadoop.util.StringUtils;
/**
* A filter to change parameter names to lower cases
@@ -75,7 +76,7 @@ public class ParamFilter implements ResourceFilter {
final MultivaluedMap<String, String> parameters) {
UriBuilder b = UriBuilder.fromUri(uri).replaceQuery("");
for(Map.Entry<String, List<String>> e : parameters.entrySet()) {
- final String key = e.getKey().toLowerCase();
+ final String key = StringUtils.toLowerCase(e.getKey());
for(String v : e.getValue()) {
b = b.queryParam(key, v);
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 3ca89f5..eea133b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -1243,7 +1243,7 @@ public class WebHdfsFileSystem extends FileSystem
if (query == null) {
return url;
}
- final String lower = query.toLowerCase();
+ final String lower = StringUtils.toLowerCase(query);
if (!lower.startsWith(OFFSET_PARAM_PREFIX)
&& !lower.contains("&" + OFFSET_PARAM_PREFIX)) {
return url;
@@ -1254,7 +1254,7 @@ public class WebHdfsFileSystem extends FileSystem
for(final StringTokenizer st = new StringTokenizer(query, "&");
st.hasMoreTokens();) {
final String token = st.nextToken();
- if (!token.toLowerCase().startsWith(OFFSET_PARAM_PREFIX)) {
+ if (!StringUtils.toLowerCase(token).startsWith(OFFSET_PARAM_PREFIX)) {
if (b == null) {
b = new StringBuilder("?").append(token);
} else {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java
index 1703e3b..60d201b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hdfs.web.resources;
import java.util.Arrays;
+import org.apache.hadoop.util.StringUtils;
abstract class EnumParam<E extends Enum<E>> extends Param<E, EnumParam.Domain<E>> {
EnumParam(final Domain<E> domain, final E value) {
@@ -40,7 +41,7 @@ abstract class EnumParam<E extends Enum<E>> extends Param<E, EnumParam.Domain<E>
@Override
final E parse(final String str) {
- return Enum.valueOf(enumClass, str.toUpperCase());
+ return Enum.valueOf(enumClass, StringUtils.toUpperCase(str));
}
}
}
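The EnumParam change above is where a default-locale toUpperCase fails hardest: Enum.valueOf requires an exact constant name, so one mis-mapped character makes every WebHDFS request with that op fail at parse time. A hypothetical repro with a stand-in enum (the real op enums live in the web.resources package):

    import java.util.Locale;

    public class EnumParseDemo {
      enum Op { GETFILESTATUS, LISTSTATUS } // stand-in, not the real op enum

      public static void main(String[] args) {
        Locale.setDefault(new Locale("tr", "TR"));
        String str = "getfilestatus";
        // str.toUpperCase() here yields "GETFİLESTATUS" (dotted İ, U+0130),
        // and Enum.valueOf(Op.class, ...) throws IllegalArgumentException.
        // Pinning the locale parses as intended:
        Op op = Enum.valueOf(Op.class, str.toUpperCase(Locale.ENGLISH));
        System.out.println(op); // GETFILESTATUS
      }
    }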
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java
index 5adb5a6..c2dfadf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.web.resources;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.Iterator;
+import org.apache.hadoop.util.StringUtils;
abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>, EnumSetParam.Domain<E>> {
/** Convert an EnumSet to a string of comma separated values. */
@@ -82,7 +83,7 @@ abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>, EnumSet
i = j > 0 ? j + 1 : 0;
j = str.indexOf(',', i);
final String sub = j >= 0? str.substring(i, j): str.substring(i);
- set.add(Enum.valueOf(enumClass, sub.trim().toUpperCase()));
+ set.add(Enum.valueOf(enumClass, StringUtils.toUpperCase(sub.trim())));
}
}
return set;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java
index ac6acf9..b439a28 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hdfs.server.namenode.snapshot;
import static org.mockito.Matchers.anyObject;
-import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
@@ -31,6 +30,7 @@ import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
import org.apache.hadoop.hdfs.server.namenode.INode;
import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
import org.apache.hadoop.hdfs.server.namenode.INodesInPath;
+import org.apache.hadoop.util.StringUtils;
import org.junit.Assert;
import org.junit.Test;
@@ -70,7 +70,7 @@ public class TestSnapshotManager {
Assert.fail("Expected SnapshotException not thrown");
} catch (SnapshotException se) {
Assert.assertTrue(
- se.getMessage().toLowerCase().contains("rollover"));
+ StringUtils.toLowerCase(se.getMessage()).contains("rollover"));
}
// Delete a snapshot to free up a slot.
@@ -86,7 +86,7 @@ public class TestSnapshotManager {
Assert.fail("Expected SnapshotException not thrown");
} catch (SnapshotException se) {
Assert.assertTrue(
- se.getMessage().toLowerCase().contains("rollover"));
+ StringUtils.toLowerCase(se.getMessage()).contains("rollover"));
}
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index aad63d3..a0e7041 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -59,6 +59,7 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.client.api.TimelineClient;
@@ -711,7 +712,7 @@ public class JobHistoryEventHandler extends AbstractService
private void processEventForTimelineServer(HistoryEvent event, JobId jobId,
long timestamp) {
TimelineEvent tEvent = new TimelineEvent();
- tEvent.setEventType(event.getEventType().name().toUpperCase());
+ tEvent.setEventType(StringUtils.toUpperCase(event.getEventType().name()));
tEvent.setTimestamp(timestamp);
TimelineEntity tEntity = new TimelineEntity();
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
index 53f21db..0f528e4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
@@ -22,7 +22,6 @@ import static org.apache.hadoop.yarn.util.StringHelper.join;
import java.io.IOException;
import java.net.URLDecoder;
-import java.util.Locale;
import javax.servlet.http.HttpServletResponse;
@@ -226,8 +225,9 @@ public class AppController extends Controller implements AMParams {
if (app.getJob() != null) {
try {
String tt = $(TASK_TYPE);
- tt = tt.isEmpty() ? "All" : StringUtils.capitalize(MRApps.taskType(tt).
- toString().toLowerCase(Locale.US));
+ tt = tt.isEmpty() ? "All" : StringUtils.capitalize(
+ org.apache.hadoop.util.StringUtils.toLowerCase(
+ MRApps.taskType(tt).toString()));
setTitle(join(tt, " Tasks for ", $(JOB_ID)));
} catch (Exception e) {
LOG.error("Failed to render tasks page with task type : "
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
index 553ba70..5b8d3a7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
@@ -314,7 +315,7 @@ public class TypeConverter {
QueueState state) {
org.apache.hadoop.mapreduce.QueueState qState =
org.apache.hadoop.mapreduce.QueueState.getState(
- state.toString().toLowerCase());
+ StringUtils.toLowerCase(state.toString()));
return qState;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
index 3919c42..876e555 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
@@ -303,7 +303,7 @@ public class MRApps extends Apps {
remoteFS.getWorkingDirectory()));
String name = (null == u.getFragment())
? p.getName() : u.getFragment();
- if (!name.toLowerCase().endsWith(".jar")) {
+ if (!StringUtils.toLowerCase(name).endsWith(".jar")) {
linkLookup.put(p, name);
}
}
@@ -317,7 +317,7 @@ public class MRApps extends Apps {
if (name == null) {
name = p.getName();
}
- if(!name.toLowerCase().endsWith(".jar")) {
+ if(!StringUtils.toLowerCase(name).endsWith(".jar")) {
MRApps.addToEnvironment(
environment,
classpathEnvVar,
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
index cc42b9c..e36efec 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapreduce;
+import org.apache.hadoop.util.StringUtils;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -151,9 +152,10 @@ public class TestTypeConverter {
.newRecord(org.apache.hadoop.yarn.api.records.QueueInfo.class);
queueInfo.setQueueState(org.apache.hadoop.yarn.api.records.QueueState.STOPPED);
org.apache.hadoop.mapreduce.QueueInfo returned =
- TypeConverter.fromYarn(queueInfo, new Configuration());
+ TypeConverter.fromYarn(queueInfo, new Configuration());
Assert.assertEquals("queueInfo translation didn't work.",
- returned.getState().toString(), queueInfo.getQueueState().toString().toLowerCase());
+ returned.getState().toString(),
+ StringUtils.toLowerCase(queueInfo.getQueueState().toString()));
}
/**
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
index 7710ba7..1ea1666 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
@@ -115,7 +115,7 @@ abstract public class Task implements Writable, Configurable {
* BYTES_READ counter and second one is of the BYTES_WRITTEN counter.
*/
protected static String[] getFileSystemCounterNames(String uriScheme) {
- String scheme = uriScheme.toUpperCase();
+ String scheme = StringUtils.toUpperCase(uriScheme);
return new String[]{scheme+"_BYTES_READ", scheme+"_BYTES_WRITTEN"};
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
index a53b76a..e0e5b79 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
@@ -25,7 +25,6 @@ import java.util.Arrays;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.Iterator;
-import java.util.Locale;
import java.util.Map;
import com.google.common.base.Joiner;
@@ -42,6 +41,7 @@ import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.FileSystemCounter;
import org.apache.hadoop.mapreduce.util.ResourceBundles;
+import org.apache.hadoop.util.StringUtils;
/**
* An abstract class to provide common implementation of the filesystem
@@ -227,7 +227,7 @@ public abstract class FileSystemCounterGroup<C extends Counter>
}
private String checkScheme(String scheme) {
- String fixed = scheme.toUpperCase(Locale.US);
+ String fixed = StringUtils.toUpperCase(scheme);
String interned = schemes.putIfAbsent(fixed, fixed);
if (schemes.size() > MAX_NUM_SCHEMES) {
// mistakes or abuses
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
index 86a57d9..51fe69a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
@@ -470,7 +470,7 @@ public class DistributedCache {
if (fragment == null) {
return false;
}
- String lowerCaseFragment = fragment.toLowerCase();
+ String lowerCaseFragment = StringUtils.toLowerCase(fragment);
if (fragments.contains(lowerCaseFragment)) {
return false;
}
@@ -485,7 +485,7 @@ public class DistributedCache {
if (fragment == null) {
return false;
}
- String lowerCaseFragment = fragment.toLowerCase();
+ String lowerCaseFragment = StringUtils.toLowerCase(fragment);
if (fragments.contains(lowerCaseFragment)) {
return false;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
index f193374..78c3a0f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
@@ -45,6 +45,8 @@ import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.util.StringUtils;
+
/**
* A InputFormat that reads input data from an SQL table.
* <p>
@@ -162,7 +164,8 @@ public class DBInputFormat<T extends DBWritable>
this.connection = createConnection();
DatabaseMetaData dbMeta = connection.getMetaData();
- this.dbProductName = dbMeta.getDatabaseProductName().toUpperCase();
+ this.dbProductName =
+ StringUtils.toUpperCase(dbMeta.getDatabaseProductName());
}
catch (Exception ex) {
throw new RuntimeException(ex);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
index 04bd867..b024cb4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
@@ -222,12 +222,14 @@ public class CLI extends Configured implements Tool {
taskType = argv[2];
taskState = argv[3];
displayTasks = true;
- if (!taskTypes.contains(taskType.toUpperCase())) {
+ if (!taskTypes.contains(
+ org.apache.hadoop.util.StringUtils.toUpperCase(taskType))) {
System.out.println("Error: Invalid task-type: " + taskType);
displayUsage(cmd);
return exitCode;
}
- if (!taskStates.contains(taskState.toLowerCase())) {
+ if (!taskStates.contains(
+ org.apache.hadoop.util.StringUtils.toLowerCase(taskState))) {
System.out.println("Error: Invalid task-state: " + taskState);
displayUsage(cmd);
return exitCode;
@@ -588,7 +590,8 @@ public class CLI extends Configured implements Tool {
*/
protected void displayTasks(Job job, String type, String state)
throws IOException, InterruptedException {
- TaskReport[] reports = job.getTaskReports(TaskType.valueOf(type.toUpperCase()));
+ TaskReport[] reports = job.getTaskReports(TaskType.valueOf(
+ org.apache.hadoop.util.StringUtils.toUpperCase(type)));
for (TaskReport report : reports) {
TIPStatus status = report.getCurrentStatus();
if ((state.equalsIgnoreCase("pending") && status ==TIPStatus.PENDING) ||
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
index f85a2ee..53997e7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
@@ -155,16 +155,16 @@ public class TestDFSIO implements Tool {
static ByteMultiple parseString(String sMultiple) {
if(sMultiple == null || sMultiple.isEmpty()) // MB by default
return MB;
- String sMU = sMultiple.toUpperCase();
- if(B.name().toUpperCase().endsWith(sMU))
+ String sMU = StringUtils.toUpperCase(sMultiple);
+ if(StringUtils.toUpperCase(B.name()).endsWith(sMU))
return B;
- if(KB.name().toUpperCase().endsWith(sMU))
+ if(StringUtils.toUpperCase(KB.name()).endsWith(sMU))
return KB;
- if(MB.name().toUpperCase().endsWith(sMU))
+ if(StringUtils.toUpperCase(MB.name()).endsWith(sMU))
return MB;
- if(GB.name().toUpperCase().endsWith(sMU))
+ if(StringUtils.toUpperCase(GB.name()).endsWith(sMU))
return GB;
- if(TB.name().toUpperCase().endsWith(sMU))
+ if(StringUtils.toUpperCase(TB.name()).endsWith(sMU))
return TB;
throw new IllegalArgumentException("Unsupported ByteMultiple "+sMultiple);
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
index 60c1ba6..dc78124 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.lib.LongSumReducer;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
public class TestFileSystem extends TestCase {
private static final Log LOG = FileSystem.LOG;
@@ -556,7 +557,8 @@ public class TestFileSystem extends TestCase {
static void checkPath(MiniDFSCluster cluster, FileSystem fileSys) throws IOException {
InetSocketAddress add = cluster.getNameNode().getNameNodeAddress();
// Test upper/lower case
- fileSys.checkPath(new Path("hdfs://" + add.getHostName().toUpperCase() + ":" + add.getPort()));
+ fileSys.checkPath(new Path("hdfs://"
+ + StringUtils.toUpperCase(add.getHostName()) + ":" + add.getPort()));
}
public void testFsClose() throws Exception {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java
index 0642052..57a7163 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java
@@ -18,6 +18,8 @@
package org.apache.hadoop.fs.slive;
+import org.apache.hadoop.util.StringUtils;
+
/**
* Constants used in various places in slive
*/
@@ -35,7 +37,7 @@ class Constants {
enum Distribution {
BEG, END, UNIFORM, MID;
String lowerName() {
- return this.name().toLowerCase();
+ return StringUtils.toLowerCase(this.name());
}
}
@@ -45,7 +47,7 @@ class Constants {
enum OperationType {
READ, APPEND, RENAME, LS, MKDIR, DELETE, CREATE, TRUNCATE;
String lowerName() {
- return this.name().toLowerCase();
+ return StringUtils.toLowerCase(this.name());
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java
index b4c98f7..02eca37 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.fs.slive;
import org.apache.hadoop.fs.slive.Constants.Distribution;
+import org.apache.hadoop.util.StringUtils;
/**
* This class holds the data representing what an operations distribution and
@@ -52,7 +53,7 @@ class OperationData {
percent = (Double.parseDouble(pieces[0]) / 100.0d);
} else if (pieces.length >= 2) {
percent = (Double.parseDouble(pieces[0]) / 100.0d);
- distribution = Distribution.valueOf(pieces[1].toUpperCase());
+ distribution = Distribution.valueOf(StringUtils.toUpperCase(pieces[1]));
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java
index 57ef017..bca5a1c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.fs.slive;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.StringUtils;
/**
* An operation output has the following object format whereby simple types are
@@ -67,7 +68,8 @@ class OperationOutput {
"Invalid key format - no type seperator - " + TYPE_SEP);
}
try {
- dataType = OutputType.valueOf(key.substring(0, place).toUpperCase());
+ dataType = OutputType.valueOf(
+ StringUtils.toUpperCase(key.substring(0, place)));
} catch (Exception e) {
throw new IllegalArgumentException(
"Invalid key format - invalid output type", e);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
index ce1837f..97360d6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextOutputFormat;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
@@ -157,7 +158,7 @@ public class SliveTest implements Tool {
if (val == null) {
return false;
}
- String cleanupOpt = val.toLowerCase().trim();
+ String cleanupOpt = StringUtils.toLowerCase(val).trim();
if (cleanupOpt.equals("true") || cleanupOpt.equals("1")) {
return true;
} else {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java
index f155dae..0a9d0e9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
@@ -214,23 +215,25 @@ public class FileBench extends Configured implements Tool {
if (!(fmt == Format.txt || cod == CCodec.pln)) {
for (CType typ : ct) {
String fn =
- fmt.name().toUpperCase() + "_" +
- cod.name().toUpperCase() + "_" +
- typ.name().toUpperCase();
+ StringUtils.toUpperCase(fmt.name()) + "_" +
+ StringUtils.toUpperCase(cod.name()) + "_" +
+ StringUtils.toUpperCase(typ.name());
typ.configure(job);
- System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
+ System.out.print(
+ StringUtils.toUpperCase(rwop.name()) + " " + fn + ": ");
System.out.println(rwop.exec(fn, job) / 1000 +
" seconds");
}
} else {
String fn =
- fmt.name().toUpperCase() + "_" +
- cod.name().toUpperCase();
+ StringUtils.toUpperCase(fmt.name()) + "_" +
+ StringUtils.toUpperCase(cod.name());
Path p = new Path(root, fn);
if (rwop == RW.r && !fs.exists(p)) {
fn += cod.getExt();
}
- System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
+ System.out.print(
+ StringUtils.toUpperCase(rwop.name()) + " " + fn + ": ");
System.out.println(rwop.exec(fn, job) / 1000 +
" seconds");
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
index 02a083b..d60905e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.After;
@@ -280,7 +281,7 @@ public class TestMapRed extends Configured implements Tool {
public void map(WritableComparable key, Text value,
OutputCollector<Text, Text> output,
Reporter reporter) throws IOException {
- String str = value.toString().toLowerCase();
+ String str = StringUtils.toLowerCase(value.toString());
output.collect(new Text(str), value);
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
index 5850242..1ec8739 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
@@ -102,7 +102,7 @@ public class DBCountPageView extends Configured implements Tool {
private void createConnection(String driverClassName
, String url) throws Exception {
-
+
Class.forName(driverClassName);
connection = DriverManager.getConnection(url);
connection.setAutoCommit(false);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
----------------------------------------------------------------------
diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
index f342463..b6a45ec 100644
--- a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
@@ -13,6 +13,7 @@
*/
package org.apache.hadoop.maven.plugin.versioninfo;
+import java.util.Locale;
import org.apache.hadoop.maven.plugin.util.Exec;
import org.apache.hadoop.maven.plugin.util.FileSetUtils;
import org.apache.maven.model.FileSet;
@@ -329,7 +330,8 @@ public class VersionInfoMojo extends AbstractMojo {
}
private String normalizePath(File file) {
- return file.getPath().toUpperCase().replaceAll("\\\\", "/");
+ return file.getPath().toUpperCase(Locale.ENGLISH)
+ .replaceAll("\\\\", "/");
}
});
byte[] md5 = computeMD5(files);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
index 83c2ce5..b664fe7 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
@@ -984,8 +984,8 @@ public class AzureNativeFileSystemStore implements NativeFileSystemStore {
private String verifyAndConvertToStandardFormat(String rawDir) throws URISyntaxException {
URI asUri = new URI(rawDir);
if (asUri.getAuthority() == null
- || asUri.getAuthority().toLowerCase(Locale.US).equalsIgnoreCase(
- sessionUri.getAuthority().toLowerCase(Locale.US))) {
+ || asUri.getAuthority().toLowerCase(Locale.ENGLISH).equalsIgnoreCase(
+ sessionUri.getAuthority().toLowerCase(Locale.ENGLISH))) {
// Applies to me.
return trim(asUri.getPath(), "/");
} else {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
index edb6b91..20fdf11 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
import org.apache.hadoop.tools.mapred.UniformSizeInputFormat;
import com.google.common.collect.Maps;
+import org.apache.hadoop.util.StringUtils;
/**
* Utility functions used in DistCp.
@@ -121,8 +122,9 @@ public class DistCpUtils {
*/
public static Class<? extends InputFormat> getStrategy(Configuration conf,
DistCpOptions options) {
- String confLabel = "distcp." +
- options.getCopyStrategy().toLowerCase(Locale.getDefault()) + ".strategy.impl";
+ String confLabel = "distcp."
+ + StringUtils.toLowerCase(options.getCopyStrategy())
+ + ".strategy" + ".impl";
return conf.getClass(confLabel, UniformSizeInputFormat.class, InputFormat.class);
}
@@ -221,7 +223,8 @@ public class DistCpUtils {
final boolean preserveXAttrs = attributes.contains(FileAttribute.XATTR);
if (preserveXAttrs || preserveRawXattrs) {
- final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
+ final String rawNS =
+ StringUtils.toLowerCase(XAttr.NameSpace.RAW.name());
Map<String, byte[]> srcXAttrs = srcFileStatus.getXAttrs();
Map<String, byte[]> targetXAttrs = getXAttrs(targetFS, path);
if (srcXAttrs != null && !srcXAttrs.equals(targetXAttrs)) {
@@ -321,7 +324,8 @@ public class DistCpUtils {
copyListingFileStatus.setXAttrs(srcXAttrs);
} else {
Map<String, byte[]> trgXAttrs = Maps.newHashMap();
- final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
+ final String rawNS =
+ StringUtils.toLowerCase(XAttr.NameSpace.RAW.name());
for (Map.Entry<String, byte[]> ent : srcXAttrs.entrySet()) {
final String xattrName = ent.getKey();
if (xattrName.startsWith(rawNS)) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
index f46c421..8a6819b 100644
--- a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
+++ b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
@@ -169,7 +169,9 @@ public class DistCpV1 implements Tool {
final char symbol;
- private FileAttribute() {symbol = toString().toLowerCase().charAt(0);}
+ private FileAttribute() {
+ symbol = StringUtils.toLowerCase(toString()).charAt(0);
+ }
static EnumSet<FileAttribute> parse(String s) {
if (s == null || s.length() == 0) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
index 83eb947..b803538 100644
--- a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
+++ b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.mapred.gridmix.Statistics.ClusterStats;
import java.util.concurrent.CountDownLatch;
import java.io.IOException;
+import org.apache.hadoop.util.StringUtils;
enum GridmixJobSubmissionPolicy {
@@ -84,6 +85,6 @@ enum GridmixJobSubmissionPolicy {
public static GridmixJobSubmissionPolicy getPolicy(
Configuration conf, GridmixJobSubmissionPolicy defaultPolicy) {
String policy = conf.get(JOB_SUBMISSION_POLICY, defaultPolicy.name());
- return valueOf(policy.toUpperCase());
+ return valueOf(StringUtils.toUpperCase(policy));
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java b/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java
index 7a35b46..967929b 100644
--- a/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java
+++ b/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java
@@ -27,12 +27,12 @@ import org.apache.hadoop.fs.swift.http.RestClientBindings;
import org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem;
import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.StringUtils;
import org.junit.Test;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
-import java.util.Locale;
public class TestSwiftFileSystemExtendedContract extends SwiftFileSystemBaseTest {
@@ -115,7 +115,7 @@ public class TestSwiftFileSystemExtendedContract extends SwiftFileSystemBaseTest
public void testFilesystemIsCaseSensitive() throws Exception {
String mixedCaseFilename = "/test/UPPER.TXT";
Path upper = path(mixedCaseFilename);
- Path lower = path(mixedCaseFilename.toLowerCase(Locale.ENGLISH));
+ Path lower = path(StringUtils.toLowerCase(mixedCaseFilename));
assertFalse("File exists" + upper, fs.exists(upper));
assertFalse("File exists" + lower, fs.exists(lower));
FSDataOutputStream out = fs.create(upper);
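This call site already used Locale.ENGLISH, so the change is a pure consolidation onto the shared helper. Judging from the substitutions throughout this patch, the helper pair in org.apache.hadoop.util.StringUtils presumably looks like the following sketch; this is inferred from the call sites, not the verbatim HADOOP-11602 source, which may differ in details such as null handling:

import java.util.Locale;

// Inferred shape of the helpers this patch routes through.
public final class StringUtilsSketch {
  public static String toLowerCase(String str) {
    return str.toLowerCase(Locale.ENGLISH);
  }
  public static String toUpperCase(String str) {
    return str.toUpperCase(Locale.ENGLISH);
  }
}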
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
index 47fdb1a..c53a7c2 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
@@ -38,6 +38,7 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.util.LineReader;
@@ -319,42 +320,42 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
}
for (int i = 0; i < args.length - (inputFilename == null ? 0 : 1); ++i) {
- if ("-h".equals(args[i].toLowerCase())
- || "-help".equals(args[i].toLowerCase())) {
+ if (StringUtils.equalsIgnoreCase("-h", args[i])
+ || StringUtils.equalsIgnoreCase("-help", args[i])) {
usage();
return 0;
}
- if ("-c".equals(args[i].toLowerCase())
- || "-collect-prefixes".equals(args[i].toLowerCase())) {
+ if (StringUtils.equalsIgnoreCase("-c", args[i])
+ || StringUtils.equalsIgnoreCase("-collect-prefixes", args[i])) {
collecting = true;
continue;
}
// these control the job digest
- if ("-write-job-trace".equals(args[i].toLowerCase())) {
+ if (StringUtils.equalsIgnoreCase("-write-job-trace", args[i])) {
++i;
jobTraceFilename = new Path(args[i]);
continue;
}
- if ("-single-line-job-traces".equals(args[i].toLowerCase())) {
+ if (StringUtils.equalsIgnoreCase("-single-line-job-traces", args[i])) {
prettyprintTrace = false;
continue;
}
- if ("-omit-task-details".equals(args[i].toLowerCase())) {
+ if (StringUtils.equalsIgnoreCase("-omit-task-details", args[i])) {
omitTaskDetails = true;
continue;
}
- if ("-write-topology".equals(args[i].toLowerCase())) {
+ if (StringUtils.equalsIgnoreCase("-write-topology", args[i])) {
++i;
topologyFilename = new Path(args[i]);
continue;
}
- if ("-job-digest-spectra".equals(args[i].toLowerCase())) {
+ if (StringUtils.equalsIgnoreCase("-job-digest-spectra", args[i])) {
ArrayList<Integer> values = new ArrayList<Integer>();
++i;
@@ -384,13 +385,13 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
continue;
}
- if ("-d".equals(args[i].toLowerCase())
- || "-debug".equals(args[i].toLowerCase())) {
+ if (StringUtils.equalsIgnoreCase("-d", args[i])
+ || StringUtils.equalsIgnoreCase("-debug", args[i])) {
debug = true;
continue;
}
- if ("-spreads".equals(args[i].toLowerCase())) {
+ if (StringUtils.equalsIgnoreCase("-spreads", args[i])) {
int min = Integer.parseInt(args[i + 1]);
int max = Integer.parseInt(args[i + 2]);
@@ -404,22 +405,22 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
}
// These control log-wide CDF outputs
- if ("-delays".equals(args[i].toLowerCase())) {
+ if (StringUtils.equalsIgnoreCase("-delays", args[i])) {
delays = true;
continue;
}
- if ("-runtimes".equals(args[i].toLowerCase())) {
+ if (StringUtils.equalsIgnoreCase("-runtimes", args[i])) {
runtimes = true;
continue;
}
- if ("-tasktimes".equals(args[i].toLowerCase())) {
+ if (StringUtils.equalsIgnoreCase("-tasktimes", args[i])) {
collectTaskTimes = true;
continue;
}
- if ("-v1".equals(args[i].toLowerCase())) {
+ if (StringUtils.equalsIgnoreCase("-v1", args[i])) {
version = 1;
continue;
}
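For flag parsing, the patch switches from "-h".equals(args[i].toLowerCase()) to StringUtils.equalsIgnoreCase. Besides removing the locale dependency, this avoids allocating a fresh lowercased copy of args[i] for every flag tested: String's own equalsIgnoreCase compares character by character using locale-independent Character mappings. A plausible shape for the helper, assumed rather than quoted:

public final class IgnoreCaseSketch {
  // Assumed sketch; String.equalsIgnoreCase is locale-independent,
  // so delegation after a null check suffices.
  public static boolean equalsIgnoreCase(String s1, String s2) {
    return s1 != null && s1.equalsIgnoreCase(s2);
  }

  public static void main(String[] args) {
    System.out.println(equalsIgnoreCase("-H", "-h")); // true on any JVM locale
  }
}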
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
index eaa9547..c5ae2fc 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
@@ -433,7 +433,7 @@ public class JobBuilder {
return Values.SUCCESS;
}
- return Values.valueOf(name.toUpperCase());
+ return Values.valueOf(StringUtils.toUpperCase(name));
}
private void processTaskUpdatedEvent(TaskUpdatedEvent event) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
index 903d5fb..4a23fa6 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.mapreduce.jobhistory.JhCounter;
import org.apache.hadoop.mapreduce.jobhistory.JhCounterGroup;
import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
+import org.apache.hadoop.util.StringUtils;
import org.codehaus.jackson.annotate.JsonAnySetter;
/**
@@ -243,7 +244,7 @@ public class LoggedTask implements DeepCompare {
}
private static String canonicalizeCounterName(String nonCanonicalName) {
- String result = nonCanonicalName.toLowerCase();
+ String result = StringUtils.toLowerCase(nonCanonicalName);
result = result.replace(' ', '|');
result = result.replace('-', '|');
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
index d1b365e..c21eb39 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
@@ -23,6 +23,7 @@ import java.util.List;
import java.util.Set;
import java.util.TreeSet;
+import org.apache.hadoop.util.StringUtils;
import org.codehaus.jackson.annotate.JsonAnySetter;
// HACK ALERT!!! This "should" have two subclasses, which might be called
@@ -611,7 +612,7 @@ public class LoggedTaskAttempt implements DeepCompare {
}
private static String canonicalizeCounterName(String nonCanonicalName) {
- String result = nonCanonicalName.toLowerCase();
+ String result = StringUtils.toLowerCase(nonCanonicalName);
result = result.replace(' ', '|');
result = result.replace('-', '|');
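LoggedTask and LoggedTaskAttempt carry identical canonicalizeCounterName methods; both must normalize the same way, or counters recorded through one path would fail to match the other. A condensed illustration of the visible portion of the pipeline (the real method continues past the two replacements shown in the hunks):

import java.util.Locale;

public class CounterNameDemo {
  // Condensed from the hunks above; only the steps visible in the diff.
  static String canonicalize(String name) {
    String result = name.toLowerCase(Locale.ENGLISH);
    result = result.replace(' ', '|');
    result = result.replace('-', '|');
    return result;
  }

  public static void main(String[] args) {
    // Differently cased variants collapse to one key.
    System.out.println(canonicalize("Map-Reduce Framework")); // map|reduce|framework
    System.out.println(canonicalize("MAP-REDUCE FRAMEWORK")); // map|reduce|framework
  }
}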
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
index 98d8aa03..bc92b71 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
@@ -25,6 +25,7 @@ import java.util.*;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.StringUtils;
/**
* This is a class used to get the current environment
@@ -43,7 +44,7 @@ public class Environment extends Properties {
// http://lopica.sourceforge.net/os.html
String command = null;
String OS = System.getProperty("os.name");
- String lowerOs = OS.toLowerCase();
+ String lowerOs = StringUtils.toLowerCase(OS);
if (OS.indexOf("Windows") > -1) {
command = "cmd /C set";
} else if (lowerOs.indexOf("ix") > -1 || lowerOs.indexOf("linux") > -1
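Note the split in this hunk: the Windows check runs on the raw os.name while the Unix-ish checks run on the lowercased copy, so that "AIX" and "Unix" both match "ix". The hunk is truncated by the diff context; in the condensed sketch below, the command assigned in the Unix branch is a placeholder, since the real value lies outside the shown lines:

import java.util.Locale;

public class OsDetectSketch {
  public static void main(String[] args) {
    String os = System.getProperty("os.name");
    String lowerOs = os.toLowerCase(Locale.ENGLISH); // pinned, as in the patch
    String command = null;
    if (os.indexOf("Windows") > -1) {
      command = "cmd /C set";
    } else if (lowerOs.indexOf("ix") > -1 || lowerOs.indexOf("linux") > -1) {
      command = "env"; // placeholder; the chosen command is outside the hunk
    }
    System.out.println(command);
  }
}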
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
index de8f740..108ad0b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
@@ -36,6 +36,7 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -173,7 +174,7 @@ public class ApplicationCLI extends YarnCLI {
if (types != null) {
for (String type : types) {
if (!type.trim().isEmpty()) {
- appTypes.add(type.toUpperCase().trim());
+ appTypes.add(StringUtils.toUpperCase(type).trim());
}
}
}
@@ -191,8 +192,8 @@ public class ApplicationCLI extends YarnCLI {
break;
}
try {
- appStates.add(YarnApplicationState.valueOf(state
- .toUpperCase().trim()));
+ appStates.add(YarnApplicationState.valueOf(
+ StringUtils.toUpperCase(state).trim()));
} catch (IllegalArgumentException ex) {
sysout.println("The application state " + state
+ " is invalid.");
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
index d603626..4f0ddfe 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
@@ -111,7 +111,8 @@ public class NodeCLI extends YarnCLI {
if (types != null) {
for (String type : types) {
if (!type.trim().isEmpty()) {
- nodeStates.add(NodeState.valueOf(type.trim().toUpperCase()));
+ nodeStates.add(NodeState.valueOf(
+ org.apache.hadoop.util.StringUtils.toUpperCase(type.trim())));
}
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
index a8996f0..ad009d6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
@@ -26,6 +26,7 @@ import java.util.Set;
import org.apache.commons.lang.math.LongRange;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
@@ -213,7 +214,7 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
// Convert applicationTags to lower case and add
this.applicationTags = new HashSet<String>();
for (String tag : tags) {
- this.applicationTags.add(tag.toLowerCase());
+ this.applicationTags.add(StringUtils.toLowerCase(tag));
}
}
@@ -258,7 +259,8 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
public void setApplicationStates(Set<String> applicationStates) {
EnumSet<YarnApplicationState> appStates = null;
for (YarnApplicationState state : YarnApplicationState.values()) {
- if (applicationStates.contains(state.name().toLowerCase())) {
+ if (applicationStates.contains(
+ StringUtils.toLowerCase(state.name()))) {
if (appStates == null) {
appStates = EnumSet.of(state);
} else {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
index 303b437..67e3a84 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
@@ -23,6 +23,7 @@ import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
@@ -291,7 +292,7 @@ extends ApplicationSubmissionContext {
// Convert applicationTags to lower case and add
this.applicationTags = new HashSet<String>();
for (String tag : tags) {
- this.applicationTags.add(tag.toLowerCase());
+ this.applicationTags.add(StringUtils.toLowerCase(tag));
}
}
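This hunk and the GetApplicationsRequestPBImpl one above are two halves of the same contract: tags are lowercased identically at submission time and at query time, so the contains() match is effectively case-insensitive and, after this patch, locale-independent on both sides. A sketch of that symmetry (the tag values are illustrative):

import java.util.HashSet;
import java.util.Locale;
import java.util.Set;

public class TagMatchSketch {
  public static void main(String[] args) {
    // Submission side: tag stored lowercased with a pinned locale.
    Set<String> applicationTags = new HashSet<String>();
    applicationTags.add("Nightly-ETL".toLowerCase(Locale.ENGLISH));

    // Query side: filter normalized the same way before matching.
    String filter = "NIGHTLY-etl".toLowerCase(Locale.ENGLISH);
    System.out.println(applicationTags.contains(filter)); // true
  }
}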
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java
index 870aa95..bd9c907 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java
@@ -23,7 +23,6 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URISyntaxException;
import java.security.PrivilegedExceptionAction;
-import java.util.Locale;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
@@ -47,6 +46,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.RunJar;
import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
@@ -272,7 +272,7 @@ public class FSDownload implements Callable<Path> {
private long unpack(File localrsrc, File dst) throws IOException {
switch (resource.getType()) {
case ARCHIVE: {
- String lowerDst = dst.getName().toLowerCase(Locale.ENGLISH);
+ String lowerDst = StringUtils.toLowerCase(dst.getName());
if (lowerDst.endsWith(".jar")) {
RunJar.unJar(localrsrc, dst);
} else if (lowerDst.endsWith(".zip")) {
@@ -291,7 +291,7 @@ public class FSDownload implements Callable<Path> {
}
break;
case PATTERN: {
- String lowerDst = dst.getName().toLowerCase(Locale.ENGLISH);
+ String lowerDst = StringUtils.toLowerCase(dst.getName());
if (lowerDst.endsWith(".jar")) {
String p = resource.getPattern();
RunJar.unJar(localrsrc, dst,
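Both the ARCHIVE and PATTERN cases key their unpack behavior off the destination's file extension, so the lowercase here is load-bearing: an archive named LIB.JAR must dispatch the same as lib.jar. The hunk is truncated mid-call; a condensed, dependency-free sketch of the ARCHIVE branch, with the real RunJar/FileUtil calls replaced by prints (the tar and default branches lie outside the shown context):

import java.io.File;
import java.io.IOException;
import java.util.Locale;

public class UnpackSketch {
  static void unpack(File localrsrc, File dst) throws IOException {
    String lowerDst = dst.getName().toLowerCase(Locale.ENGLISH);
    if (lowerDst.endsWith(".jar")) {
      System.out.println("unJar " + dst);  // RunJar.unJar(localrsrc, dst) in the real code
    } else if (lowerDst.endsWith(".zip")) {
      System.out.println("unZip " + dst);  // FileUtil.unZip(localrsrc, dst) in the real code
    }
  }

  public static void main(String[] args) throws IOException {
    unpack(new File("cache/app.bin"), new File("LIB.JAR")); // dispatches as .jar
  }
}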
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
index c848828..5acb3f3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
@@ -26,7 +26,6 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
-import java.util.Locale;
import java.util.Set;
import java.util.regex.Pattern;
@@ -35,6 +34,7 @@ import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.webapp.WebAppException;
import org.slf4j.Logger;
@@ -241,7 +241,7 @@ public class HamletGen {
puts(indent, "\n",
"private <T extends _> ", retName, "<T> ", methodName,
"_(T e, boolean inline) {\n",
- " return new ", retName, "<T>(\"", retName.toLowerCase(Locale.US),
+ " return new ", retName, "<T>(\"", StringUtils.toLowerCase(retName),
"\", e, opt(", !endTagOptional.contains(retName), ", inline, ",
retName.equals("PRE"), ")); }");
}
@@ -258,7 +258,7 @@ public class HamletGen {
puts(0, ") {");
puts(indent,
topMode ? "" : " closeAttrs();\n",
- " return ", retName.toLowerCase(Locale.US), "_(this, ",
+ " return ", StringUtils.toLowerCase(retName), "_" + "(this, ",
isInline(className, retName), ");\n", "}");
} else if (params.length == 1) {
puts(0, "String selector) {");
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
index 68dc84e..06a56d8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
@@ -88,7 +88,8 @@ public class RegistryUtils {
* @return the converted username
*/
public static String convertUsername(String username) {
- String converted= username.toLowerCase(Locale.ENGLISH);
+ String converted =
+ org.apache.hadoop.util.StringUtils.toLowerCase(username);
int atSymbol = converted.indexOf('@');
if (atSymbol > 0) {
converted = converted.substring(0, atSymbol);
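convertUsername lowercases the name and strips a Kerberos-style @REALM suffix; the hunk shows nearly the whole method. Reassembled as a runnable sketch, with the trailing return assumed since it falls outside the shown lines:

import java.util.Locale;

public class ConvertUsernameSketch {
  public static String convertUsername(String username) {
    String converted = username.toLowerCase(Locale.ENGLISH);
    int atSymbol = converted.indexOf('@');
    if (atSymbol > 0) {
      converted = converted.substring(0, atSymbol);
    }
    return converted; // assumed; outside the shown hunk
  }

  public static void main(String[] args) {
    System.out.println(convertUsername("Alice@EXAMPLE.COM")); // alice
  }
}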
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
index 2faba5f..9edc9ab 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
@@ -31,6 +31,7 @@ import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
import org.apache.hadoop.yarn.server.webapp.WebServices;
@@ -147,7 +148,8 @@ public class AHSWebServices extends WebServices {
}
Set<String> appStates = parseQueries(statesQuery, true);
for (String appState : appStates) {
- switch (YarnApplicationState.valueOf(appState.toUpperCase())) {
+ switch (YarnApplicationState.valueOf(
+ StringUtils.toUpperCase(appState))) {
case FINISHED:
case FAILED:
case KILLED:
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
index 0907f2c..915e3f2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
@@ -52,6 +52,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
import org.apache.hadoop.yarn.api.records.timeline.TimelineDomains;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
@@ -417,7 +418,7 @@ public class TimelineWebServices {
String[] strs = str.split(delimiter);
List<Field> fieldList = new ArrayList<Field>();
for (String s : strs) {
- s = s.trim().toUpperCase();
+ s = StringUtils.toUpperCase(s.trim());
if (s.equals("EVENTS")) {
fieldList.add(Field.EVENTS);
} else if (s.equals("LASTEVENTONLY")) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b46f9e72/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
index a02b80f..909bf1d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
@@ -31,6 +31,7 @@ import javax.ws.rs.WebApplicationException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -172,9 +173,9 @@ public class WebServices {
break;
}
- if (checkAppStates
- && !appStates.contains(appReport.getYarnApplicationState().toString()
- .toLowerCase())) {
+ if (checkAppStates &&
+ !appStates.contains(StringUtils.toLowerCase(
+ appReport.getYarnApplicationState().toString()))) {
continue;
}
if (finalStatusQuery != null && !finalStatusQuery.isEmpty()) {
@@ -194,9 +195,9 @@ public class WebServices {
continue;
}
}
- if (checkAppTypes
- && !appTypes.contains(appReport.getApplicationType().trim()
- .toLowerCase())) {
+ if (checkAppTypes &&
+ !appTypes.contains(
+ StringUtils.toLowerCase(appReport.getApplicationType().trim()))) {
continue;
}
@@ -404,7 +405,8 @@ public class WebServices {
if (isState) {
try {
// enum string is in uppercase
- YarnApplicationState.valueOf(paramStr.trim().toUpperCase());
+ YarnApplicationState.valueOf(
+ StringUtils.toUpperCase(paramStr.trim()));
} catch (RuntimeException e) {
YarnApplicationState[] stateArray =
YarnApplicationState.values();
@@ -414,7 +416,7 @@ public class WebServices {
+ allAppStates);
}
}
- params.add(paramStr.trim().toLowerCase());
+ params.add(StringUtils.toLowerCase(paramStr.trim()));
}
}
}
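The WebServices changes close the loop: query parameters are validated against the uppercased enum, then stored lowercased (params.add above), and each report's state and type are lowercased with the same helper before the contains() checks, so both sides of every comparison share one pinned locale. A final sketch of that symmetry, with plain strings standing in for the YARN report objects:

import java.util.HashSet;
import java.util.Locale;
import java.util.Set;

public class StateFilterSketch {
  public static void main(String[] args) {
    // Query side: validated, then stored lowercased.
    Set<String> appStates = new HashSet<String>();
    appStates.add("FINISHED".toLowerCase(Locale.ENGLISH));

    // Report side, e.g. appReport.getYarnApplicationState().toString().
    String reported = "FINISHED";
    boolean match = appStates.contains(reported.toLowerCase(Locale.ENGLISH));
    System.out.println(match); // true on any default locale
  }
}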