You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by zh...@apache.org on 2015/02/23 20:37:05 UTC
[17/52] [abbrv] hadoop git commit: HADOOP-11602. Fix
toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa)
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index aad63d3..ca204a6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -23,6 +23,7 @@ import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
+import java.util.Locale;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
@@ -711,7 +712,8 @@ public class JobHistoryEventHandler extends AbstractService
private void processEventForTimelineServer(HistoryEvent event, JobId jobId,
long timestamp) {
TimelineEvent tEvent = new TimelineEvent();
- tEvent.setEventType(event.getEventType().name().toUpperCase());
+ tEvent.setEventType(
+ event.getEventType().name().toUpperCase(Locale.ENGLISH));
tEvent.setTimestamp(timestamp);
TimelineEntity tEntity = new TimelineEntity();
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
index 53f21db..440ff49 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
@@ -227,7 +227,7 @@ public class AppController extends Controller implements AMParams {
try {
String tt = $(TASK_TYPE);
tt = tt.isEmpty() ? "All" : StringUtils.capitalize(MRApps.taskType(tt).
- toString().toLowerCase(Locale.US));
+ toString().toLowerCase(Locale.ENGLISH));
setTitle(join(tt, " Tasks for ", $(JOB_ID)));
} catch (Exception e) {
LOG.error("Failed to render tasks page with task type : "
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
index 553ba70..e0c4773 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.mapreduce;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
+import java.util.Locale;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobPriority;
@@ -314,7 +315,7 @@ public class TypeConverter {
QueueState state) {
org.apache.hadoop.mapreduce.QueueState qState =
org.apache.hadoop.mapreduce.QueueState.getState(
- state.toString().toLowerCase());
+ state.toString().toLowerCase(Locale.ENGLISH));
return qState;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
index 08b44f8..37cfb7a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
@@ -30,6 +30,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import com.google.common.annotations.VisibleForTesting;
@@ -303,7 +304,7 @@ public class MRApps extends Apps {
remoteFS.getWorkingDirectory()));
String name = (null == u.getFragment())
? p.getName() : u.getFragment();
- if (!name.toLowerCase().endsWith(".jar")) {
+ if (!name.toLowerCase(Locale.ENGLISH).endsWith(".jar")) {
linkLookup.put(p, name);
}
}
@@ -317,7 +318,7 @@ public class MRApps extends Apps {
if (name == null) {
name = p.getName();
}
- if(!name.toLowerCase().endsWith(".jar")) {
+ if(!name.toLowerCase(Locale.ENGLISH).endsWith(".jar")) {
MRApps.addToEnvironment(
environment,
classpathEnvVar,
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
index cc42b9c..d360811 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
@@ -22,6 +22,7 @@ import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.List;
+import java.util.Locale;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobStatus.State;
@@ -153,7 +154,8 @@ public class TestTypeConverter {
org.apache.hadoop.mapreduce.QueueInfo returned =
TypeConverter.fromYarn(queueInfo, new Configuration());
Assert.assertEquals("queueInfo translation didn't work.",
- returned.getState().toString(), queueInfo.getQueueState().toString().toLowerCase());
+ returned.getState().toString(),
+ queueInfo.getQueueState().toString().toLowerCase(Locale.ENGLISH));
}
/**
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
index 5274438..73dab4f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
@@ -28,6 +28,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -116,7 +117,7 @@ abstract public class Task implements Writable, Configurable {
* BYTES_READ counter and second one is of the BYTES_WRITTEN counter.
*/
protected static String[] getFileSystemCounterNames(String uriScheme) {
- String scheme = uriScheme.toUpperCase();
+ String scheme = uriScheme.toUpperCase(Locale.ENGLISH);
return new String[]{scheme+"_BYTES_READ", scheme+"_BYTES_WRITTEN"};
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
index a53b76a..e8d5d1d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
@@ -227,7 +227,7 @@ public abstract class FileSystemCounterGroup<C extends Counter>
}
private String checkScheme(String scheme) {
- String fixed = scheme.toUpperCase(Locale.US);
+ String fixed = scheme.toUpperCase(Locale.ENGLISH);
String interned = schemes.putIfAbsent(fixed, fixed);
if (schemes.size() > MAX_NUM_SCHEMES) {
// mistakes or abuses
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
index eaa5af8..ef9b0a4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
@@ -473,7 +473,7 @@ public class DistributedCache {
if (fragment == null) {
return false;
}
- String lowerCaseFragment = fragment.toLowerCase();
+ String lowerCaseFragment = fragment.toLowerCase(Locale.ENGLISH);
if (fragments.contains(lowerCaseFragment)) {
return false;
}
@@ -488,7 +488,7 @@ public class DistributedCache {
if (fragment == null) {
return false;
}
- String lowerCaseFragment = fragment.toLowerCase();
+ String lowerCaseFragment = fragment.toLowerCase(Locale.ENGLISH);
if (fragments.contains(lowerCaseFragment)) {
return false;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
index 00fbeda..7faa736 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
@@ -29,6 +29,7 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
+import java.util.Locale;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -162,7 +163,8 @@ public class DBInputFormat<T extends DBWritable>
this.connection = createConnection();
DatabaseMetaData dbMeta = connection.getMetaData();
- this.dbProductName = dbMeta.getDatabaseProductName().toUpperCase();
+ this.dbProductName =
+ dbMeta.getDatabaseProductName().toUpperCase(Locale.ENGLISH);
}
catch (Exception ex) {
throw new RuntimeException(ex);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
index 37ba5b7..6af815b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
@@ -22,6 +22,7 @@ import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
+import java.util.Locale;
import java.util.Set;
import java.util.HashSet;
import java.util.Arrays;
@@ -222,12 +223,12 @@ public class CLI extends Configured implements Tool {
taskType = argv[2];
taskState = argv[3];
displayTasks = true;
- if (!taskTypes.contains(taskType.toUpperCase())) {
+ if (!taskTypes.contains(taskType.toUpperCase(Locale.ENGLISH))) {
System.out.println("Error: Invalid task-type: " + taskType);
displayUsage(cmd);
return exitCode;
}
- if (!taskStates.contains(taskState.toLowerCase())) {
+ if (!taskStates.contains(taskState.toLowerCase(Locale.ENGLISH))) {
System.out.println("Error: Invalid task-state: " + taskState);
displayUsage(cmd);
return exitCode;
@@ -593,7 +594,8 @@ public class CLI extends Configured implements Tool {
throws IOException, InterruptedException {
TaskReport[] reports=null;
- reports = job.getTaskReports(TaskType.valueOf(type.toUpperCase()));
+ reports = job.getTaskReports(
+ TaskType.valueOf(type.toUpperCase(Locale.ENGLISH)));
for (TaskReport report : reports) {
TIPStatus status = report.getCurrentStatus();
if ((state.equalsIgnoreCase("pending") && status ==TIPStatus.PENDING) ||
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
index d9cd07b..9420497 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
@@ -29,6 +29,7 @@ import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.Date;
+import java.util.Locale;
import java.util.Random;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
@@ -154,16 +155,16 @@ public class TestDFSIO implements Tool {
static ByteMultiple parseString(String sMultiple) {
if(sMultiple == null || sMultiple.isEmpty()) // MB by default
return MB;
- String sMU = sMultiple.toUpperCase();
- if(B.name().toUpperCase().endsWith(sMU))
+ String sMU = sMultiple.toUpperCase(Locale.ENGLISH);
+ if(B.name().toUpperCase(Locale.ENGLISH).endsWith(sMU))
return B;
- if(KB.name().toUpperCase().endsWith(sMU))
+ if(KB.name().toUpperCase(Locale.ENGLISH).endsWith(sMU))
return KB;
- if(MB.name().toUpperCase().endsWith(sMU))
+ if(MB.name().toUpperCase(Locale.ENGLISH).endsWith(sMU))
return MB;
- if(GB.name().toUpperCase().endsWith(sMU))
+ if(GB.name().toUpperCase(Locale.ENGLISH).endsWith(sMU))
return GB;
- if(TB.name().toUpperCase().endsWith(sMU))
+ if(TB.name().toUpperCase(Locale.ENGLISH).endsWith(sMU))
return TB;
throw new IllegalArgumentException("Unsupported ByteMultiple "+sMultiple);
}
@@ -736,7 +737,7 @@ public class TestDFSIO implements Tool {
}
for (int i = 0; i < args.length; i++) { // parse command line
- if (args[i].toLowerCase().startsWith("-read")) {
+ if (args[i].toLowerCase(Locale.ENGLISH).startsWith("-read")) {
testType = TestType.TEST_TYPE_READ;
} else if (args[i].equalsIgnoreCase("-write")) {
testType = TestType.TEST_TYPE_WRITE;
@@ -755,9 +756,10 @@ public class TestDFSIO implements Tool {
testType = TestType.TEST_TYPE_TRUNCATE;
} else if (args[i].equalsIgnoreCase("-clean")) {
testType = TestType.TEST_TYPE_CLEANUP;
- } else if (args[i].toLowerCase().startsWith("-seq")) {
+ } else if (args[i].toLowerCase(Locale.ENGLISH).startsWith("-seq")) {
isSequential = true;
- } else if (args[i].toLowerCase().startsWith("-compression")) {
+ } else if (
+ args[i].toLowerCase(Locale.ENGLISH).startsWith("-compression")) {
compressionClass = args[++i];
} else if (args[i].equalsIgnoreCase("-nrfiles")) {
nrFiles = Integer.parseInt(args[++i]);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
index 13e27cd..e2b8985 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
@@ -24,6 +24,7 @@ import java.io.OutputStream;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.Collections;
+import java.util.Locale;
import java.util.Random;
import java.util.List;
import java.util.ArrayList;
@@ -556,7 +557,10 @@ public class TestFileSystem extends TestCase {
static void checkPath(MiniDFSCluster cluster, FileSystem fileSys) throws IOException {
InetSocketAddress add = cluster.getNameNode().getNameNodeAddress();
// Test upper/lower case
- fileSys.checkPath(new Path("hdfs://" + add.getHostName().toUpperCase() + ":" + add.getPort()));
+ fileSys.checkPath(
+ new Path("hdfs://"
+ + add.getHostName().toUpperCase(Locale.ENGLISH)
+ + ":" + add.getPort()));
}
public void testFsClose() throws Exception {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java
index 6f4f442..adf5270 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java
@@ -18,6 +18,8 @@
package org.apache.hadoop.fs.slive;
+import java.util.Locale;
+
/**
* Constants used in various places in slive
*/
@@ -35,7 +37,7 @@ class Constants {
enum Distribution {
BEG, END, UNIFORM, MID;
String lowerName() {
- return this.name().toLowerCase();
+ return this.name().toLowerCase(Locale.ENGLISH);
}
}
@@ -45,7 +47,7 @@ class Constants {
enum OperationType {
READ, APPEND, RENAME, LS, MKDIR, DELETE, CREATE;
String lowerName() {
- return this.name().toLowerCase();
+ return this.name().toLowerCase(Locale.ENGLISH);
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java
index b4c98f7..462db86 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.fs.slive;
+import java.util.Locale;
import org.apache.hadoop.fs.slive.Constants.Distribution;
/**
@@ -52,7 +53,8 @@ class OperationData {
percent = (Double.parseDouble(pieces[0]) / 100.0d);
} else if (pieces.length >= 2) {
percent = (Double.parseDouble(pieces[0]) / 100.0d);
- distribution = Distribution.valueOf(pieces[1].toUpperCase());
+ distribution =
+ Distribution.valueOf(pieces[1].toUpperCase(Locale.ENGLISH));
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java
index 57ef017..d60a607 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.fs.slive;
+import java.util.Locale;
import org.apache.hadoop.io.Text;
/**
@@ -67,7 +68,8 @@ class OperationOutput {
"Invalid key format - no type seperator - " + TYPE_SEP);
}
try {
- dataType = OutputType.valueOf(key.substring(0, place).toUpperCase());
+ dataType = OutputType.valueOf(
+ key.substring(0, place).toUpperCase(Locale.ENGLISH));
} catch (Exception e) {
throw new IllegalArgumentException(
"Invalid key format - invalid output type", e);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
index ce1837f..443089d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
@@ -27,6 +27,7 @@ import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.TreeMap;
@@ -157,7 +158,7 @@ public class SliveTest implements Tool {
if (val == null) {
return false;
}
- String cleanupOpt = val.toLowerCase().trim();
+ String cleanupOpt = val.toLowerCase(Locale.ENGLISH).trim();
if (cleanupOpt.equals("true") || cleanupOpt.equals("1")) {
return true;
} else {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java
index f155dae..0830f37 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java
@@ -23,6 +23,7 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.EnumSet;
import java.util.HashMap;
+import java.util.Locale;
import java.util.Map;
import java.util.Random;
@@ -214,23 +215,25 @@ public class FileBench extends Configured implements Tool {
if (!(fmt == Format.txt || cod == CCodec.pln)) {
for (CType typ : ct) {
String fn =
- fmt.name().toUpperCase() + "_" +
- cod.name().toUpperCase() + "_" +
- typ.name().toUpperCase();
+ fmt.name().toUpperCase(Locale.ENGLISH) + "_" +
+ cod.name().toUpperCase(Locale.ENGLISH) + "_" +
+ typ.name().toUpperCase(Locale.ENGLISH);
typ.configure(job);
- System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
+ System.out.print(
+ rwop.name().toUpperCase(Locale.ENGLISH) + " " + fn + ": ");
System.out.println(rwop.exec(fn, job) / 1000 +
" seconds");
}
} else {
String fn =
- fmt.name().toUpperCase() + "_" +
- cod.name().toUpperCase();
+ fmt.name().toUpperCase(Locale.ENGLISH) + "_" +
+ cod.name().toUpperCase(Locale.ENGLISH);
Path p = new Path(root, fn);
if (rwop == RW.r && !fs.exists(p)) {
fn += cod.getExt();
}
- System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
+ System.out.print(
+ rwop.name().toUpperCase(Locale.ENGLISH) + " " + fn + ": ");
System.out.println(rwop.exec(fn, job) / 1000 +
" seconds");
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
index 02a083b..b9bbd60 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java
@@ -28,6 +28,7 @@ import java.io.File;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
+import java.util.Locale;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
@@ -280,7 +281,7 @@ public class TestMapRed extends Configured implements Tool {
public void map(WritableComparable key, Text value,
OutputCollector<Text, Text> output,
Reporter reporter) throws IOException {
- String str = value.toString().toLowerCase();
+ String str = value.toString().toLowerCase(Locale.ENGLISH);
output.collect(new Text(str), value);
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
index 270ddc9..4379cd5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java
@@ -27,6 +27,7 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
+import java.util.Locale;
import java.util.Random;
import org.apache.commons.logging.Log;
@@ -102,7 +103,7 @@ public class DBCountPageView extends Configured implements Tool {
private void createConnection(String driverClassName
, String url) throws Exception {
- if(driverClassName.toLowerCase().contains("oracle")) {
+ if(driverClassName.toLowerCase(Locale.ENGLISH).contains("oracle")) {
isOracle = true;
}
Class.forName(driverClassName);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
----------------------------------------------------------------------
diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
index f342463..f0ec59c 100644
--- a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java
@@ -35,6 +35,7 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
+import java.util.Locale;
import java.util.TimeZone;
/**
@@ -329,7 +330,8 @@ public class VersionInfoMojo extends AbstractMojo {
}
private String normalizePath(File file) {
- return file.getPath().toUpperCase().replaceAll("\\\\", "/");
+ return file.getPath().toUpperCase(Locale.ENGLISH)
+ .replaceAll("\\\\", "/");
}
});
byte[] md5 = computeMD5(files);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
index 2412698..bd8ca6e 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
@@ -979,8 +979,8 @@ public class AzureNativeFileSystemStore implements NativeFileSystemStore {
private String verifyAndConvertToStandardFormat(String rawDir) throws URISyntaxException {
URI asUri = new URI(rawDir);
if (asUri.getAuthority() == null
- || asUri.getAuthority().toLowerCase(Locale.US).equalsIgnoreCase(
- sessionUri.getAuthority().toLowerCase(Locale.US))) {
+ || asUri.getAuthority().toLowerCase(Locale.ENGLISH).equalsIgnoreCase(
+ sessionUri.getAuthority().toLowerCase(Locale.ENGLISH))) {
// Applies to me.
return trim(asUri.getPath(), "/");
} else {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
index 71e84a1..36873c7 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
@@ -121,8 +121,9 @@ public class DistCpUtils {
*/
public static Class<? extends InputFormat> getStrategy(Configuration conf,
DistCpOptions options) {
- String confLabel = "distcp." +
- options.getCopyStrategy().toLowerCase(Locale.getDefault()) + ".strategy.impl";
+ String confLabel = "distcp."
+ + options.getCopyStrategy().toLowerCase(Locale.ENGLISH)
+ + ".strategy.impl";
return conf.getClass(confLabel, UniformSizeInputFormat.class, InputFormat.class);
}
@@ -221,7 +222,8 @@ public class DistCpUtils {
final boolean preserveXAttrs = attributes.contains(FileAttribute.XATTR);
if (preserveXAttrs || preserveRawXattrs) {
- final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
+ final String rawNS =
+ XAttr.NameSpace.RAW.name().toLowerCase(Locale.ENGLISH);
Map<String, byte[]> srcXAttrs = srcFileStatus.getXAttrs();
Map<String, byte[]> targetXAttrs = getXAttrs(targetFS, path);
if (srcXAttrs != null && !srcXAttrs.equals(targetXAttrs)) {
@@ -321,7 +323,8 @@ public class DistCpUtils {
copyListingFileStatus.setXAttrs(srcXAttrs);
} else {
Map<String, byte[]> trgXAttrs = Maps.newHashMap();
- final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
+ final String rawNS =
+ XAttr.NameSpace.RAW.name().toLowerCase(Locale.ENGLISH);
for (Map.Entry<String, byte[]> ent : srcXAttrs.entrySet()) {
final String xattrName = ent.getKey();
if (xattrName.startsWith(rawNS)) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
index f46c421..1db17f7 100644
--- a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
+++ b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
@@ -31,6 +31,7 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
+import java.util.Locale;
import java.util.Random;
import java.util.Stack;
import java.util.StringTokenizer;
@@ -169,7 +170,9 @@ public class DistCpV1 implements Tool {
final char symbol;
- private FileAttribute() {symbol = toString().toLowerCase().charAt(0);}
+ private FileAttribute() {
+ symbol = toString().toLowerCase(Locale.ENGLISH).charAt(0);
+ }
static EnumSet<FileAttribute> parse(String s) {
if (s == null || s.length() == 0) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
index 83eb947..b010725 100644
--- a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
+++ b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.mapred.gridmix.Statistics.JobStats;
import org.apache.hadoop.mapred.gridmix.Statistics.ClusterStats;
import java.util.concurrent.CountDownLatch;
+import java.util.Locale;
import java.io.IOException;
enum GridmixJobSubmissionPolicy {
@@ -84,6 +85,6 @@ enum GridmixJobSubmissionPolicy {
public static GridmixJobSubmissionPolicy getPolicy(
Configuration conf, GridmixJobSubmissionPolicy defaultPolicy) {
String policy = conf.get(JOB_SUBMISSION_POLICY, defaultPolicy.name());
- return valueOf(policy.toUpperCase());
+ return valueOf(policy.toUpperCase(Locale.ENGLISH));
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
index 47fdb1a..776391d 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
@@ -28,6 +28,7 @@ import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
+import java.util.Locale;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.ArrayList;
@@ -319,42 +320,43 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
}
for (int i = 0; i < args.length - (inputFilename == null ? 0 : 1); ++i) {
- if ("-h".equals(args[i].toLowerCase())
- || "-help".equals(args[i].toLowerCase())) {
+ if ("-h".equals(args[i].toLowerCase(Locale.ENGLISH))
+ || "-help".equals(args[i].toLowerCase(Locale.ENGLISH))) {
usage();
return 0;
}
- if ("-c".equals(args[i].toLowerCase())
- || "-collect-prefixes".equals(args[i].toLowerCase())) {
+ if ("-c".equals(args[i].toLowerCase(Locale.ENGLISH))
+ || "-collect-prefixes".equals(args[i].toLowerCase(Locale.ENGLISH))) {
collecting = true;
continue;
}
// these control the job digest
- if ("-write-job-trace".equals(args[i].toLowerCase())) {
+ if ("-write-job-trace".equals(args[i].toLowerCase(Locale.ENGLISH))) {
++i;
jobTraceFilename = new Path(args[i]);
continue;
}
- if ("-single-line-job-traces".equals(args[i].toLowerCase())) {
+ if ("-single-line-job-traces".equals(
+ args[i].toLowerCase(Locale.ENGLISH))) {
prettyprintTrace = false;
continue;
}
- if ("-omit-task-details".equals(args[i].toLowerCase())) {
+ if ("-omit-task-details".equals(args[i].toLowerCase(Locale.ENGLISH))) {
omitTaskDetails = true;
continue;
}
- if ("-write-topology".equals(args[i].toLowerCase())) {
+ if ("-write-topology".equals(args[i].toLowerCase(Locale.ENGLISH))) {
++i;
topologyFilename = new Path(args[i]);
continue;
}
- if ("-job-digest-spectra".equals(args[i].toLowerCase())) {
+ if ("-job-digest-spectra".equals(args[i].toLowerCase(Locale.ENGLISH))) {
ArrayList<Integer> values = new ArrayList<Integer>();
++i;
@@ -384,13 +386,13 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
continue;
}
- if ("-d".equals(args[i].toLowerCase())
- || "-debug".equals(args[i].toLowerCase())) {
+ if ("-d".equals(args[i].toLowerCase(Locale.ENGLISH))
+ || "-debug".equals(args[i].toLowerCase(Locale.ENGLISH))) {
debug = true;
continue;
}
- if ("-spreads".equals(args[i].toLowerCase())) {
+ if ("-spreads".equals(args[i].toLowerCase(Locale.ENGLISH))) {
int min = Integer.parseInt(args[i + 1]);
int max = Integer.parseInt(args[i + 2]);
@@ -404,22 +406,22 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
}
// These control log-wide CDF outputs
- if ("-delays".equals(args[i].toLowerCase())) {
+ if ("-delays".equals(args[i].toLowerCase(Locale.ENGLISH))) {
delays = true;
continue;
}
- if ("-runtimes".equals(args[i].toLowerCase())) {
+ if ("-runtimes".equals(args[i].toLowerCase(Locale.ENGLISH))) {
runtimes = true;
continue;
}
- if ("-tasktimes".equals(args[i].toLowerCase())) {
+ if ("-tasktimes".equals(args[i].toLowerCase(Locale.ENGLISH))) {
collectTaskTimes = true;
continue;
}
- if ("-v1".equals(args[i].toLowerCase())) {
+ if ("-v1".equals(args[i].toLowerCase(Locale.ENGLISH))) {
version = 1;
continue;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
index eaa9547..4b512d2 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.StringTokenizer;
@@ -433,7 +434,7 @@ public class JobBuilder {
return Values.SUCCESS;
}
- return Values.valueOf(name.toUpperCase());
+ return Values.valueOf(name.toUpperCase(Locale.ENGLISH));
}
private void processTaskUpdatedEvent(TaskUpdatedEvent event) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
index 903d5fb..068ac5b 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import java.util.Locale;
import java.util.Set;
import java.util.TreeSet;
@@ -243,7 +244,7 @@ public class LoggedTask implements DeepCompare {
}
private static String canonicalizeCounterName(String nonCanonicalName) {
- String result = nonCanonicalName.toLowerCase();
+ String result = nonCanonicalName.toLowerCase(Locale.ENGLISH);
result = result.replace(' ', '|');
result = result.replace('-', '|');
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
index d1b365e..c7823c4 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
import java.util.ArrayList;
import java.util.List;
+import java.util.Locale;
import java.util.Set;
import java.util.TreeSet;
@@ -611,7 +612,7 @@ public class LoggedTaskAttempt implements DeepCompare {
}
private static String canonicalizeCounterName(String nonCanonicalName) {
- String result = nonCanonicalName.toLowerCase();
+ String result = nonCanonicalName.toLowerCase(Locale.ENGLISH);
result = result.replace(' ', '|');
result = result.replace('-', '|');
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
index 98d8aa03..72ff3b0 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
@@ -43,7 +43,7 @@ public class Environment extends Properties {
// http://lopica.sourceforge.net/os.html
String command = null;
String OS = System.getProperty("os.name");
- String lowerOs = OS.toLowerCase();
+ String lowerOs = OS.toLowerCase(Locale.ENGLISH);
if (OS.indexOf("Windows") > -1) {
command = "cmd /C set";
} else if (lowerOs.indexOf("ix") > -1 || lowerOs.indexOf("linux") > -1
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
index de8f740..1cb3e58 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
@@ -26,6 +26,7 @@ import java.text.DecimalFormat;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
+import java.util.Locale;
import java.util.Set;
import org.apache.commons.cli.CommandLine;
@@ -173,7 +174,7 @@ public class ApplicationCLI extends YarnCLI {
if (types != null) {
for (String type : types) {
if (!type.trim().isEmpty()) {
- appTypes.add(type.toUpperCase().trim());
+ appTypes.add(type.toUpperCase(Locale.ENGLISH).trim());
}
}
}
@@ -192,7 +193,7 @@ public class ApplicationCLI extends YarnCLI {
}
try {
appStates.add(YarnApplicationState.valueOf(state
- .toUpperCase().trim()));
+ .toUpperCase(Locale.ENGLISH).trim()));
} catch (IllegalArgumentException ex) {
sysout.println("The application state " + state
+ " is invalid.");
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
index 22c240f..a6ed9b5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
@@ -27,6 +27,7 @@ import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
+import java.util.Locale;
import java.util.Set;
import org.apache.commons.cli.CommandLine;
@@ -110,7 +111,8 @@ public class NodeCLI extends YarnCLI {
if (types != null) {
for (String type : types) {
if (!type.trim().isEmpty()) {
- nodeStates.add(NodeState.valueOf(type.trim().toUpperCase()));
+ nodeStates.add(
+ NodeState.valueOf(type.trim().toUpperCase(Locale.ENGLISH)));
}
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
index a8996f0..0b39dfe 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
+import java.util.Locale;
import java.util.Set;
import org.apache.commons.lang.math.LongRange;
@@ -213,7 +214,7 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
// Convert applicationTags to lower case and add
this.applicationTags = new HashSet<String>();
for (String tag : tags) {
- this.applicationTags.add(tag.toLowerCase());
+ this.applicationTags.add(tag.toLowerCase(Locale.ENGLISH));
}
}
@@ -258,7 +259,8 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
public void setApplicationStates(Set<String> applicationStates) {
EnumSet<YarnApplicationState> appStates = null;
for (YarnApplicationState state : YarnApplicationState.values()) {
- if (applicationStates.contains(state.name().toLowerCase())) {
+ if (applicationStates.contains(
+ state.name().toLowerCase(Locale.ENGLISH))) {
if (appStates == null) {
appStates = EnumSet.of(state);
} else {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
index 303b437..fe89f81 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.yarn.api.records.impl.pb;
import java.util.HashSet;
+import java.util.Locale;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Private;
@@ -291,7 +292,7 @@ extends ApplicationSubmissionContext {
// Convert applicationTags to lower case and add
this.applicationTags = new HashSet<String>();
for (String tag : tags) {
- this.applicationTags.add(tag.toLowerCase());
+ this.applicationTags.add(tag.toLowerCase(Locale.ENGLISH));
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
index c848828..45d9b61 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
@@ -241,7 +241,8 @@ public class HamletGen {
puts(indent, "\n",
"private <T extends _> ", retName, "<T> ", methodName,
"_(T e, boolean inline) {\n",
- " return new ", retName, "<T>(\"", retName.toLowerCase(Locale.US),
+ " return new ", retName, "<T>(\"",
+ retName.toLowerCase(Locale.ENGLISH),
"\", e, opt(", !endTagOptional.contains(retName), ", inline, ",
retName.equals("PRE"), ")); }");
}
@@ -258,7 +259,7 @@ public class HamletGen {
puts(0, ") {");
puts(indent,
topMode ? "" : " closeAttrs();\n",
- " return ", retName.toLowerCase(Locale.US), "_(this, ",
+ " return ", retName.toLowerCase(Locale.ENGLISH), "_(this, ",
isInline(className, retName), ");\n", "}");
} else if (params.length == 1) {
puts(0, "String selector) {");
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
index 2040f57..4140df4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
import java.util.Collections;
+import java.util.Locale;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
@@ -147,7 +148,8 @@ public class AHSWebServices extends WebServices {
}
Set<String> appStates = parseQueries(statesQuery, true);
for (String appState : appStates) {
- switch (YarnApplicationState.valueOf(appState.toUpperCase())) {
+ switch (YarnApplicationState.valueOf(
+ appState.toUpperCase(Locale.ENGLISH))) {
case FINISHED:
case FAILED:
case KILLED:
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
index 0907f2c..44ba2b9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
@@ -24,6 +24,7 @@ import java.util.Collection;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
+import java.util.Locale;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
@@ -417,7 +418,7 @@ public class TimelineWebServices {
String[] strs = str.split(delimiter);
List<Field> fieldList = new ArrayList<Field>();
for (String s : strs) {
- s = s.trim().toUpperCase();
+ s = s.trim().toUpperCase(Locale.ENGLISH);
if (s.equals("EVENTS")) {
fieldList.add(Field.EVENTS);
} else if (s.equals("LASTEVENTONLY")) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
index 385d10a..cc9000f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
@@ -23,6 +23,7 @@ import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
+import java.util.Locale;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
@@ -164,7 +165,7 @@ public class WebServices {
if (checkAppStates
&& !appStates.contains(appReport.getYarnApplicationState().toString()
- .toLowerCase())) {
+ .toLowerCase(Locale.ENGLISH))) {
continue;
}
if (finalStatusQuery != null && !finalStatusQuery.isEmpty()) {
@@ -186,7 +187,7 @@ public class WebServices {
}
if (checkAppTypes
&& !appTypes.contains(appReport.getApplicationType().trim()
- .toLowerCase())) {
+ .toLowerCase(Locale.ENGLISH))) {
continue;
}
@@ -368,7 +369,8 @@ public class WebServices {
if (isState) {
try {
// enum string is in the uppercase
- YarnApplicationState.valueOf(paramStr.trim().toUpperCase());
+ YarnApplicationState.valueOf(
+ paramStr.trim().toUpperCase(Locale.ENGLISH));
} catch (RuntimeException e) {
YarnApplicationState[] stateArray =
YarnApplicationState.values();
@@ -378,7 +380,7 @@ public class WebServices {
+ allAppStates);
}
}
- params.add(paramStr.trim().toLowerCase());
+ params.add(paramStr.trim().toLowerCase(Locale.ENGLISH));
}
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
index d3ccb91..38f7b93 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
@@ -29,6 +29,7 @@ import java.util.Collections;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
@@ -754,7 +755,7 @@ public class ClientRMService extends AbstractService implements
if (applicationTypes != null && !applicationTypes.isEmpty()) {
String appTypeToMatch = caseSensitive
? application.getApplicationType()
- : application.getApplicationType().toLowerCase();
+ : application.getApplicationType().toLowerCase(Locale.ENGLISH);
if (!applicationTypes.contains(appTypeToMatch)) {
continue;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java
index 230f9a9..e95b725 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.resourcemanager.resource;
+import java.util.Locale;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
@@ -61,7 +62,7 @@ public class ResourceWeights {
sb.append(", ");
}
ResourceType resourceType = ResourceType.values()[i];
- sb.append(resourceType.name().toLowerCase());
+ sb.append(resourceType.name().toLowerCase(Locale.ENGLISH));
sb.append(String.format(" weight=%.1f", getWeight(resourceType)));
}
sb.append(">");
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
index 3528c2d..5e6d3eb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
@@ -24,6 +24,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
@@ -394,7 +395,8 @@ public class CapacitySchedulerConfiguration extends ReservationSchedulerConfigur
public QueueState getState(String queue) {
String state = get(getQueuePrefix(queue) + STATE);
return (state != null) ?
- QueueState.valueOf(state.toUpperCase()) : QueueState.RUNNING;
+ QueueState.valueOf(state.toUpperCase(Locale.ENGLISH)) :
+ QueueState.RUNNING;
}
public void setAccessibleNodeLabels(String queue, Set<String> labels) {
@@ -490,7 +492,7 @@ public class CapacitySchedulerConfiguration extends ReservationSchedulerConfigur
}
private static String getAclKey(QueueACL acl) {
- return "acl_" + acl.toString().toLowerCase();
+ return "acl_" + acl.toString().toLowerCase(Locale.ENGLISH);
}
public AccessControlList getAcl(String queue, QueueACL acl) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java
index 32ef906..0922092 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
+import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -241,7 +242,7 @@ public class FairSchedulerConfiguration extends Configuration {
public static Resource parseResourceConfigValue(String val)
throws AllocationConfigurationException {
try {
- val = val.toLowerCase();
+ val = val.toLowerCase(Locale.ENGLISH);
int memory = findResource(val, "mb");
int vcores = findResource(val, "vcores");
return BuilderUtils.newResource(memory, vcores);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
index cc28afc..23e7b81 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
+import java.util.Locale;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.util.ReflectionUtils;
@@ -72,7 +73,7 @@ public abstract class SchedulingPolicy {
throws AllocationConfigurationException {
@SuppressWarnings("rawtypes")
Class clazz;
- String text = policy.toLowerCase();
+ String text = policy.toLowerCase(Locale.ENGLISH);
if (text.equalsIgnoreCase(FairSharePolicy.NAME)) {
clazz = FairSharePolicy.class;
} else if (text.equalsIgnoreCase(FifoPolicy.NAME)) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
index f28a9a8..881ecb7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
+import java.util.Locale;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.NODE_STATE;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.NODE_LABEL;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
@@ -77,7 +78,7 @@ class NodesPage extends RmView {
.th(".nodeManagerVersion", "Version")._()._().tbody();
NodeState stateFilter = null;
if (type != null && !type.isEmpty()) {
- stateFilter = NodeState.valueOf(type.toUpperCase());
+ stateFilter = NodeState.valueOf(type.toUpperCase(Locale.ENGLISH));
}
Collection<RMNode> rmNodes = this.rm.getRMContext().getRMNodes().values();
boolean isInactive = false;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
index 1834b6a..fb40054 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
@@ -30,6 +30,7 @@ import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
@@ -257,7 +258,8 @@ public class RMWebServices {
} else {
acceptedStates = EnumSet.noneOf(NodeState.class);
for (String stateStr : states.split(",")) {
- acceptedStates.add(NodeState.valueOf(stateStr.toUpperCase()));
+ acceptedStates.add(NodeState.valueOf(
+ stateStr.toUpperCase(Locale.ENGLISH)));
}
}
@@ -506,7 +508,7 @@ public class RMWebServices {
// if no states, returns the counts of all RMAppStates
if (states.size() == 0) {
for (YarnApplicationState state : YarnApplicationState.values()) {
- states.add(state.toString().toLowerCase());
+ states.add(state.toString().toLowerCase(Locale.ENGLISH));
}
}
// in case we extend to multiple applicationTypes in the future
@@ -518,8 +520,9 @@ public class RMWebServices {
ConcurrentMap<ApplicationId, RMApp> apps = rm.getRMContext().getRMApps();
for (RMApp rmapp : apps.values()) {
YarnApplicationState state = rmapp.createApplicationState();
- String type = rmapp.getApplicationType().trim().toLowerCase();
- if (states.contains(state.toString().toLowerCase())) {
+ String type =
+ rmapp.getApplicationType().trim().toLowerCase(Locale.ENGLISH);
+ if (states.contains(state.toString().toLowerCase(Locale.ENGLISH))) {
if (types.contains(ANY)) {
countApp(scoreboard, state, ANY);
} else if (types.contains(type)) {
@@ -554,7 +557,8 @@ public class RMWebServices {
if (isState) {
try {
// enum string is in the uppercase
- YarnApplicationState.valueOf(paramStr.trim().toUpperCase());
+ YarnApplicationState.valueOf(
+ paramStr.trim().toUpperCase(Locale.ENGLISH));
} catch (RuntimeException e) {
YarnApplicationState[] stateArray =
YarnApplicationState.values();
@@ -564,7 +568,7 @@ public class RMWebServices {
+ " specified. It should be one of " + allAppStates);
}
}
- params.add(paramStr.trim().toLowerCase());
+ params.add(paramStr.trim().toLowerCase(Locale.ENGLISH));
}
}
}
@@ -582,7 +586,8 @@ public class RMWebServices {
for (String state : states) {
Map<String, Long> partScoreboard = new HashMap<String, Long>();
scoreboard.put(
- YarnApplicationState.valueOf(state.toUpperCase()), partScoreboard);
+ YarnApplicationState.valueOf(
+ state.toUpperCase(Locale.ENGLISH)), partScoreboard);
// types is verified not to be empty
for (String type : types) {
partScoreboard.put(type, 0L);