You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by en...@apache.org on 2013/07/03 00:54:47 UTC
svn commit: r1499142 - in
/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src:
./ main/docs/ main/java/ main/java/org/apache/hadoop/util/ test/core/
test/java/org/apache/hadoop/util/
Author: enis
Date: Tue Jul 2 22:54:47 2013
New Revision: 1499142
URL: http://svn.apache.org/r1499142
Log:
HADOOP-9660 Merge change 1499132 from trunk to branch-2.
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/ (props changed)
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/ (props changed)
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/ (props changed)
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/core/ (props changed)
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java
Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Tue Jul 2 22:54:47 2013
@@ -0,0 +1,2 @@
+/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src:1227776-1294021
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src:1161777,1161781,1162008,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1164255,1164301,1164339,1164771,1166009,1166402,1167318,1167383,1170379,1170459,1171221,1171297,1172916,1173402,1176550,1176986,1177002,1177035,1177399,1177487,1177531,1177859,1177864,1178639,1179512,1179869,1182189,1182205,1182214,1182641,1183132,1189357,1189613,1189932,1189982,1190109,1190611,1195575,1195760,1196113,1196129,1197885,1198924,1204114,1204117,1204122,1204124,1204129,1204131,1204177,1204363,1204370,1204376,1204388,1205260,1205697,1206786,1206830,1207694,1208153,1208313,1209246,1210208,1210319-1210320,1212004,1212021,1212062,1212073,1212084,1212615,1213537,1213586,1213592-1213593,1213598,1213619,1213954,1214046,1214114,1214499,1215358,1220510,1221348,1225114,1225192,1225456,1225489,1225591,1226211,1226239,1226350,1227091,1227165,1227423,1227861,1227964,1229347,
1230398,1231569,1231572,1231627,1231640,1231732,1233605,1234555,1235135,1235137,1235956,1236444,1236456,1239386,1239752,1240897,1240928,1242485,1243065,1243104,1244185,1244766,1245751,1245762,1291602,1293071,1293419,1295061,1295227,1295240,1295251,1296556,1298044,1298696,1298700,1299045,1299434,1299963,1301250,1301303,1301308,1301312,1301820,1301871,1302624,1302704-1302705,1303474,1304063,1304099,1304112,1304118,1304542,1305230,1305891,1306935,1307106,1308006,1308180,1308192,1308457,1308485,1309625,1309994,1310185,1311556,1312029,1325069,1325367,1326890,1329319,1329541,1332363,1332839,1333321,1333557,1334216,1335258,1336491,1336945,1336966,1337283,1337334,1337339,1337415,1338806,1339906,1339970,1340268,1340750,1342112,1342483,1342534,1343913,1343944,1344386,1344419,1344423,1344840,1344874,1345563,1346148,1347133,1347867,1348207,1349124,1349456,1349459,1349466,1349561,1349616,1349641,1351445,1351818,1355211,1357442,1360448,1361813,1362278,1372583,1372649,1373683,1374696,1375216,13753
01,1375450,1375829,1376123,1376322,1378444,1378969,1380984,1381419,1381845,1384435,1393243,1395703,1406939,1414878,1423189,1430682,1430688,1440245,1442386,1442755,1446428,1452581,1454593,1459392,1475959,1476395,1477849,1479143,1480838,1480840,1487093,1499132
Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1499132
Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1499132
Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java?rev=1499142&r1=1499141&r2=1499142&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java Tue Jul 2 22:54:47 2013
@@ -25,6 +25,7 @@ import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
import org.apache.commons.cli.CommandLine;
@@ -413,7 +414,50 @@ public class GenericOptionsParser {
}
return StringUtils.arrayToString(finalArr);
}
-
+
+ /**
+ * Windows PowerShell and cmd can parse key=value pairs themselves, because
+ * /pkey=value is the same as /pkey value under Windows. However, this is not
+ * compatible with how we get arbitrary key values in -Dkey=value format.
+ * Under Windows, -D key=value or -Dkey=value might be passed as
+ * [-Dkey, value] or [-D key, value]. This method undoes these splits and
+ * returns a modified args list by manually changing [-D, key, value]
+ * into [-D, key=value]
+ *
+ * @param args command line arguments
+ * @return fixed command line arguments that GnuParser can parse
+ */
+ private String[] preProcessForWindows(String[] args) {
+ if (!Shell.WINDOWS) {
+ return args;
+ }
+ List<String> newArgs = new ArrayList<String>(args.length);
+ for (int i=0; i < args.length; i++) {
+ String prop = null;
+ if (args[i].equals("-D")) {
+ newArgs.add(args[i]);
+ if (i < args.length - 1) {
+ prop = args[++i];
+ }
+ } else if (args[i].startsWith("-D")) {
+ prop = args[i];
+ } else {
+ newArgs.add(args[i]);
+ }
+ if (prop != null) {
+ if (prop.contains("=")) {
+ // everything good
+ } else {
+ if (i < args.length - 1) {
+ prop += "=" + args[++i];
+ }
+ }
+ newArgs.add(prop);
+ }
+ }
+
+ return newArgs.toArray(new String[newArgs.size()]);
+ }
/**
* Parse the user-specified options, get the generic options, and modify
@@ -427,7 +471,7 @@ public class GenericOptionsParser {
opts = buildGeneralOptions(opts);
CommandLineParser parser = new GnuParser();
try {
- commandLine = parser.parse(opts, args, true);
+ commandLine = parser.parse(opts, preProcessForWindows(args), true);
processGeneralOptions(conf, commandLine);
} catch(ParseException e) {
LOG.warn("options parsing failed: "+e.getMessage());
Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1499132
Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java?rev=1499142&r1=1499141&r2=1499142&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java Tue Jul 2 22:54:47 2013
@@ -21,6 +21,8 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
+import java.util.Arrays;
+import java.util.Map;
import junit.framework.TestCase;
@@ -35,6 +37,9 @@ import org.apache.hadoop.security.token.
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
+import org.junit.Assert;
+
+import com.google.common.collect.Maps;
public class TestGenericOptionsParser extends TestCase {
File testDir;
@@ -191,4 +196,90 @@ public class TestGenericOptionsParser ex
localFs.delete(new Path(testDir.getAbsolutePath()), true);
}
+
+ /** Test -D parsing */
+ public void testDOptionParsing() throws Exception {
+ String[] args;
+ Map<String,String> expectedMap;
+ String[] expectedRemainingArgs;
+
+ args = new String[]{};
+ expectedRemainingArgs = new String[]{};
+ expectedMap = Maps.newHashMap();
+ assertDOptionParsing(args, expectedMap, expectedRemainingArgs);
+
+ args = new String[]{"-Dkey1=value1"};
+ expectedRemainingArgs = new String[]{};
+ expectedMap = Maps.newHashMap();
+ expectedMap.put("key1", "value1");
+ assertDOptionParsing(args, expectedMap, expectedRemainingArgs);
+
+ args = new String[]{"-fs", "hdfs://somefs/", "-Dkey1=value1", "arg1"};
+ expectedRemainingArgs = new String[]{"arg1"};
+ assertDOptionParsing(args, expectedMap, expectedRemainingArgs);
+
+ args = new String[]{"-fs", "hdfs://somefs/", "-D", "key1=value1", "arg1"};
+ assertDOptionParsing(args, expectedMap, expectedRemainingArgs);
+
+ if (Shell.WINDOWS) {
+ args = new String[]{"-fs", "hdfs://somefs/", "-D", "key1",
+ "value1", "arg1"};
+ assertDOptionParsing(args, expectedMap, expectedRemainingArgs);
+
+ args = new String[]{"-fs", "hdfs://somefs/", "-Dkey1", "value1", "arg1"};
+ assertDOptionParsing(args, expectedMap, expectedRemainingArgs);
+
+ args = new String[]{"-fs", "hdfs://somefs/", "-D", "key1", "value1",
+ "-fs", "someother", "-D", "key2", "value2", "arg1", "arg2"};
+ expectedRemainingArgs = new String[]{"arg1", "arg2"};
+ expectedMap = Maps.newHashMap();
+ expectedMap.put("key1", "value1");
+ expectedMap.put("key2", "value2");
+ assertDOptionParsing(args, expectedMap, expectedRemainingArgs);
+
+ args = new String[]{"-fs", "hdfs://somefs/", "-D", "key1", "value1",
+ "-fs", "someother", "-D", "key2", "value2"};
+ expectedRemainingArgs = new String[]{};
+ assertDOptionParsing(args, expectedMap, expectedRemainingArgs);
+
+ args = new String[]{"-fs", "hdfs://somefs/", "-D", "key1", "value1",
+ "-fs", "someother", "-D", "key2"};
+ expectedMap = Maps.newHashMap();
+ expectedMap.put("key1", "value1");
+ expectedMap.put("key2", null); // we expect key2 not set
+ assertDOptionParsing(args, expectedMap, expectedRemainingArgs);
+ }
+
+ args = new String[]{"-fs", "hdfs://somefs/", "-D", "key1=value1",
+ "-fs", "someother", "-Dkey2"};
+ expectedRemainingArgs = new String[]{};
+ expectedMap = Maps.newHashMap();
+ expectedMap.put("key1", "value1");
+ expectedMap.put("key2", null); // we expect key2 not set
+ assertDOptionParsing(args, expectedMap, expectedRemainingArgs);
+
+ args = new String[]{"-fs", "hdfs://somefs/", "-D"};
+ expectedMap = Maps.newHashMap();
+ assertDOptionParsing(args, expectedMap, expectedRemainingArgs);
+ }
+
+ private void assertDOptionParsing(String[] args,
+ Map<String,String> expectedMap, String[] expectedRemainingArgs)
+ throws Exception {
+ for (Map.Entry<String, String> entry : expectedMap.entrySet()) {
+ assertNull(conf.get(entry.getKey()));
+ }
+
+ Configuration conf = new Configuration();
+ GenericOptionsParser parser = new GenericOptionsParser(conf, args);
+ String[] remainingArgs = parser.getRemainingArgs();
+
+ for (Map.Entry<String, String> entry : expectedMap.entrySet()) {
+ assertEquals(entry.getValue(), conf.get(entry.getKey()));
+ }
+
+ Assert.assertArrayEquals(
+ Arrays.toString(remainingArgs) + Arrays.toString(expectedRemainingArgs),
+ expectedRemainingArgs, remainingArgs);
+ }
}