You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2012/01/16 05:24:33 UTC
svn commit: r1231834 - in
/hadoop/common/branches/branch-0.23-PB/hadoop-common-project: ./
hadoop-auth/ hadoop-common/ hadoop-common/src/main/docs/
hadoop-common/src/main/docs/src/documentation/content/xdocs/
hadoop-common/src/main/java/ hadoop-common/...
Author: szetszwo
Date: Mon Jan 16 04:24:24 2012
New Revision: 1231834
URL: http://svn.apache.org/viewvc?rev=1231834&view=rev
Log:
Merge r1227776 through r1231827 from 0.23.
Added:
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/NetUtilsTestResolver.java
- copied unchanged from r1231827, hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/NetUtilsTestResolver.java
Removed:
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/NetUtilsTestResolver.java
Modified:
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/ (props changed)
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-auth/ (props changed)
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/ (props changed)
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/CHANGES.txt (contents, props changed)
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/docs/ (props changed)
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/file_system_shell.xml
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/ (props changed)
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/core/ (props changed)
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
Propchange: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Jan 16 04:24:24 2012
@@ -1 +1,2 @@
-/hadoop/common/trunk/hadoop-common-project:1161777,1161781,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1163981,1164255,1164301,1164339,1166009,1166402,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1177487,1177531,1177859,1177864,1189932,1189982,1195575,1196113,1196129,1204114,1204117,1204122,1204124,1204129,1204131,1204370,1204376,1204388,1205260,1206786,1206830,1207694,1208153,1208313,1212021,1212062,1212073,1212084,1213537,1213586,1213592-1213593,1213954,1214046,1220510,1221348,1226211,1227091,1227423
+/hadoop/common/branches/branch-0.23/hadoop-common-project:1227776-1231827
+/hadoop/common/trunk/hadoop-common-project:1161777,1161781,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1163981,1164255,1164301,1164339,1166009,1166402,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1177487,1177531,1177859,1177864,1182205,1189932,1189982,1195575,1196113,1196129,1204114,1204117,1204122,1204124,1204129,1204131,1204177,1204370,1204376,1204388,1205260,1205697,1206786,1206830,1207694,1208153,1208313,1212021,1212062,1212073,1212084,1213537,1213586,1213592-1213593,1213954,1214046,1220510,1221348,1225192,1225456,1225489,1225591,1226211,1226239,1227091,1227165,1227423,1229347,1230398,1231569,1231572,1231627,1231640
Propchange: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-auth/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Jan 16 04:24:24 2012
@@ -1 +1,2 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-auth:1161777,1161781,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1163981,1164255,1164301,1164339,1166402,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1177487,1177531,1177859,1177864,1179869,1189357,1189932,1189982,1195575,1196113,1196129,1204114,1204117,1204122,1204124,1204129,1204131,1204370,1204376,1204388,1205260,1206786,1206830,1207694,1208153,1208313,1212021,1212062,1212073,1212084,1213537,1213586,1213592-1213593,1213954,1214046,1220510,1221348,1226211,1227091,1227423
+/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-auth:1227776-1231827
+/hadoop/common/trunk/hadoop-common-project/hadoop-auth:1161777,1161781,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1163981,1164255,1164301,1164339,1166402,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1177487,1177531,1177859,1177864,1179869,1182205,1189357,1189932,1189982,1195575,1196113,1196129,1204114,1204117,1204122,1204124,1204129,1204131,1204177,1204370,1204376,1204388,1205260,1205697,1206786,1206830,1207694,1208153,1208313,1212021,1212062,1212073,1212084,1213537,1213586,1213592-1213593,1213954,1214046,1220510,1221348,1225192,1225456,1225489,1225591,1226211,1226239,1227091,1227165,1227423,1229347,1230398,1231569,1231572,1231627,1231640
Propchange: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Jan 16 04:24:24 2012
@@ -1 +1,2 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common:1161777,1161781,1162008,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1164255,1164301,1164339,1164771,1166009,1166402,1167318,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1176986,1177002,1177035,1177399,1177487,1177531,1177859,1177864,1178639,1179512,1179869,1182641,1183132,1189357,1189932,1189982,1190109,1190611,1195575,1195760,1196113,1196129,1197885,1204114,1204117,1204122,1204124,1204129,1204131,1204363,1204370,1204376,1204388,1205260,1206786,1206830,1207694,1208153,1208313,1209246,1210208,1210319-1210320,1212004,1212021,1212062,1212073,1212084,1212615,1213537,1213586,1213592-1213593,1213598,1213619,1213954,1214046,1214114,1214499,1215358,1220510,1221348,1226211,1227091,1227423
+/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common:1227776-1231827
+/hadoop/common/trunk/hadoop-common-project/hadoop-common:1161777,1161781,1162008,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1164255,1164301,1164339,1164771,1166009,1166402,1167318,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1176986,1177002,1177035,1177399,1177487,1177531,1177859,1177864,1178639,1179512,1179869,1182205,1182641,1183132,1189357,1189932,1189982,1190109,1190611,1195575,1195760,1196113,1196129,1197885,1204114,1204117,1204122,1204124,1204129,1204131,1204177,1204363,1204370,1204376,1204388,1205260,1205697,1206786,1206830,1207694,1208153,1208313,1209246,1210208,1210319-1210320,1212004,1212021,1212062,1212073,1212084,1212615,1213537,1213586,1213592-1213593,1213598,1213619,1213954,1214046,1214114,1214499,1215358,1220510,1221348,1225192,1225456,1225489,1225591,1226211,1226239,1227091,1227165,1227423,1229347,1230398,1231569,1231572,1231627,1231640
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/CHANGES.txt Mon Jan 16 04:24:24 2012
@@ -64,6 +64,8 @@ Release 0.23.1 - Unreleased
HADOOP-7657. Add support for LZ4 compression. (Binglin Chang via todd)
+ HADOOP-7910. Add Configuration.getLongBytes to handle human readable byte size values. (Sho Shimauchi via harsh)
+
IMPROVEMENTS
HADOOP-7801. HADOOP_PREFIX cannot be overriden. (Bruno Mahé via tomwhite)
@@ -108,6 +110,13 @@ Release 0.23.1 - Unreleased
hostname in token instead of IP to allow server IP change.
(Daryn Sharp via suresh)
+ HADOOP-7934. Normalize dependencies versions across all modules. (tucu)
+
+ HADOOP-7348. Change 'addnl' in getmerge util to be a flag '-nl' instead.
+ (XieXianshan via harsh)
+
+ HADOOP-7975. Add LZ4 as an entry in the default codec list, missed by HADOOP-7657 (harsh)
+
OPTIMIZATIONS
BUG FIXES
@@ -168,6 +177,19 @@ Release 0.23.1 - Unreleased
HADOOP-7949. Updated maxIdleTime default in the code to match
core-default.xml (eli)
+ HADOOP-7907. hadoop-tools JARs are not part of the distro. (tucu)
+
+ HADOOP-7936. There's a Hoop README in the root dir of the tarball. (tucu)
+
+ HADOOP-7963. Fix ViewFS to catch a null canonical service-name and pass
+ tests TestViewFileSystem* (Siddharth Seth via vinodkv)
+
+ HADOOP-7964. Deadlock in NetUtils and SecurityUtil class initialization.
+ (Daryn Sharp via suresh)
+
+ HADOOP-7974. TestViewFsTrash incorrectly determines the user's home
+ directory. (harsh via eli)
+
Release 0.23.0 - 2011-11-01
INCOMPATIBLE CHANGES
@@ -914,6 +936,9 @@ Release 0.22.1 - Unreleased
BUG FIXES
+ HADOOP-7937. Forward port SequenceFile#syncFs and friends from Hadoop 1.x.
+ (tomwhite)
+
Release 0.22.0 - 2011-11-29
INCOMPATIBLE CHANGES
Propchange: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Jan 16 04:24:24 2012
@@ -1,5 +1,6 @@
+/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt:1227776-1231827
/hadoop/common/branches/yahoo-merge/CHANGES.txt:1079157,1079163-1079164,1079167
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161777,1161781,1162008,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1164255,1164301,1164771,1166009,1166402,1167318,1167383,1169986,1170046,1170379,1170459,1171297,1171894,1171909,1172186,1172916,1173402,1176550,1176986,1177002,1177035,1177399,1177487,1177531,1177859,1177864,1178639,1179512,1179869,1182641,1183132,1189357,1189932,1189982,1190109,1190611,1195575,1195760,1196113,1196129,1197885,1204114,1204117,1204122,1204124,1204129,1204131,1204363,1204376,1204388,1205260,1206830,1207694,1208153,1208313,1209246,1210208,1210319-1210320,1212004,1212062,1212073,1212084,1212615,1213537,1213586,1213592-1213593,1213598,1213619,1213954,1214046,1214114,1214499,1215358,1220510,1221348,1226211,1226351,1227091,1227423
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161777,1161781,1162008,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1164255,1164301,1164771,1166009,1166402,1167318,1167383,1169986,1170046,1170379,1170459,1171297,1171894,1171909,1172186,1172916,1173402,1176550,1176986,1177002,1177035,1177399,1177487,1177531,1177859,1177864,1178639,1179512,1179869,1182205,1182641,1183132,1189357,1189932,1189982,1190109,1190611,1195575,1195760,1196113,1196129,1197885,1204114,1204117,1204122,1204124,1204129,1204131,1204177,1204363,1204376,1204388,1205260,1206830,1207694,1208153,1208313,1209246,1210208,1210319-1210320,1212004,1212062,1212073,1212084,1212615,1213537,1213586,1213592-1213593,1213598,1213619,1213954,1214046,1214114,1214499,1215358,1220510,1221348,1225192,1225456,1225489,1225591,1226211,1226239,1226351,1227091,1227165,1227423,1229347,1230398,1231569,1231572,1231627,1231640
/hadoop/core/branches/branch-0.18/CHANGES.txt:727226
/hadoop/core/branches/branch-0.19/CHANGES.txt:713112
/hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278
Propchange: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Jan 16 04:24:24 2012
@@ -1,2 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1161777,1161781,1162008,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1164255,1164301,1164339,1164771,1166402,1167318,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1177487,1177531,1177859,1177864,1183132,1189932,1189982,1195575,1196113,1196129,1204114,1204117,1204122,1204124,1204129,1204131,1204370,1204376,1204388,1205260,1206786,1206830,1207694,1208153,1208313,1209246,1212021,1212062,1212073,1212084,1213537,1213586,1213592-1213593,1213954,1214046,1220510,1221348,1226211,1227091,1227423
+/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/docs:1227776-1231827
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1161777,1161781,1162008,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1164255,1164301,1164339,1164771,1166402,1167318,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1177487,1177531,1177859,1177864,1182205,1183132,1189932,1189982,1195575,1196113,1196129,1204114,1204117,1204122,1204124,1204129,1204131,1204177,1204370,1204376,1204388,1205260,1205697,1206786,1206830,1207694,1208153,1208313,1209246,1212021,1212062,1212073,1212084,1213537,1213586,1213592-1213593,1213954,1214046,1220510,1221348,1225192,1225456,1225489,1225591,1226211,1226239,1227091,1227165,1227423,1229347,1230398,1231569,1231572,1231627,1231640
/hadoop/core/branches/branch-0.19/src/docs:713112
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml Mon Jan 16 04:24:24 2012
@@ -628,8 +628,11 @@
<tr>
<td>conf/hdfs-site.xml</td>
<td>dfs.blocksize</td>
- <td>134217728</td>
- <td>HDFS blocksize of 128MB for large file-systems.</td>
+ <td>128m</td>
+ <td>
+ HDFS blocksize of 128 MB for large file-systems. Sizes can be provided
+ in size-prefixed values (10k, 128m, 1g, etc.) or simply in bytes (134217728 for 128 MB, etc.).
+ </td>
</tr>
<tr>
<td>conf/hdfs-site.xml</td>
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/file_system_shell.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/file_system_shell.xml?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/file_system_shell.xml (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/file_system_shell.xml Mon Jan 16 04:24:24 2012
@@ -260,11 +260,11 @@
<section>
<title> getmerge </title>
<p>
- <code>Usage: hdfs dfs -getmerge <src> <localdst> [addnl]</code>
+ <code>Usage: hdfs dfs -getmerge [-nl] <src> <localdst></code>
</p>
<p>
Takes a source directory and a destination file as input and concatenates files in src into the destination local file.
- Optionally <code>addnl</code> can be set to enable adding a newline character at the end of each file.
+ Optionally <code>-nl</code> flag can be set to enable adding a newline character at the end of each file during merge.
</p>
</section>
Propchange: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Jan 16 04:24:24 2012
@@ -1,3 +1,4 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1161777,1161781,1162008,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1164255,1164301,1164339,1164771,1166402,1167318,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1176986,1177002,1177035,1177399,1177487,1177531,1177859,1177864,1178639,1179512,1182641,1183132,1189932,1189982,1190109,1190611,1195575,1195760,1196113,1196129,1197885,1204114,1204117,1204122,1204124,1204129,1204131,1204363,1204370,1204376,1204388,1205260,1206786,1206830,1207694,1208153,1208313,1210208,1210319-1210320,1212004,1212021,1212062,1212073,1212084,1212615,1213537,1213586,1213592-1213593,1213619,1213954,1214046,1214114,1214499,1215358,1220510,1221348,1226211,1227091,1227423
+/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java:1227776-1231827
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1161777,1161781,1162008,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1164255,1164301,1164339,1164771,1166402,1167318,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1176986,1177002,1177035,1177399,1177487,1177531,1177859,1177864,1178639,1179512,1182205,1182641,1183132,1189932,1189982,1190109,1190611,1195575,1195760,1196113,1196129,1197885,1204114,1204117,1204122,1204124,1204129,1204131,1204177,1204363,1204370,1204376,1204388,1205260,1205697,1206786,1206830,1207694,1208153,1208313,1210208,1210319-1210320,1212004,1212021,1212062,1212073,1212084,1212615,1213537,1213586,1213592-1213593,1213619,1213954,1214046,1214114,1214499,1215358,1220510,1221348,1225192,1225456,1225489,1225591,1226211,1226239,1227091,1227165,1227423,1229347,1230398,1231569,1231572,1231627,1231640
/hadoop/core/branches/branch-0.19/core/src/java:713112
/hadoop/core/trunk/src/core:776175-785643,785929-786278
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Mon Jan 16 04:24:24 2012
@@ -737,6 +737,27 @@ public class Configuration implements It
return Long.parseLong(valueString);
}
+ /**
+ * Get the value of the <code>name</code> property as a <code>long</code> or
+ * human readable format. If no such property exists, the provided default
+ * value is returned, or if the specified value is not a valid
+ * <code>long</code> or human readable format, then an error is thrown. You
+ * can use the following suffix (case insensitive): k(kilo), m(mega), g(giga),
+ * t(tera), p(peta), e(exa)
+ *
+ * @param name property name.
+ * @param defaultValue default value.
+ * @throws NumberFormatException when the value is invalid
+ * @return property value as a <code>long</code>,
+ * or <code>defaultValue</code>.
+ */
+ public long getLongBytes(String name, long defaultValue) {
+ String valueString = getTrimmed(name);
+ if (valueString == null)
+ return defaultValue;
+ return StringUtils.TraditionalBinaryPrefix.string2long(valueString);
+ }
+
private String getHexDigits(String value) {
boolean negative = false;
String str = value;
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java Mon Jan 16 04:24:24 2012
@@ -45,26 +45,22 @@ class CopyCommands {
/** merge multiple files together */
public static class Merge extends FsCommand {
public static final String NAME = "getmerge";
- public static final String USAGE = "<src> <localdst> [addnl]";
+ public static final String USAGE = "[-nl] <src> <localdst>";
public static final String DESCRIPTION =
"Get all the files in the directories that\n" +
"match the source file pattern and merge and sort them to only\n" +
- "one file on local fs. <src> is kept.";
+ "one file on local fs. <src> is kept.\n" +
+ " -nl Add a newline character at the end of each file.";
protected PathData dst = null;
protected String delimiter = null;
@Override
protected void processOptions(LinkedList<String> args) throws IOException {
- CommandFormat cf = new CommandFormat(2, 3);
+ CommandFormat cf = new CommandFormat(2, 3, "nl");
cf.parse(args);
- // TODO: this really should be a -nl option
- if ((args.size() > 2) && Boolean.parseBoolean(args.removeLast())) {
- delimiter = "\n";
- } else {
- delimiter = null;
- }
+ delimiter = cf.getOpt("nl") ? "\n" : null;
dst = new PathData(new File(args.removeLast()), getConf());
}
@@ -197,4 +193,4 @@ class CopyCommands {
public static final String USAGE = Get.USAGE;
public static final String DESCRIPTION = "Identical to the -get command.";
}
-}
\ No newline at end of file
+}
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java Mon Jan 16 04:24:24 2012
@@ -514,7 +514,7 @@ public class ViewFileSystem extends File
for (int i = 0; i < mountPoints.size(); ++i) {
String serviceName =
mountPoints.get(i).target.targetFileSystem.getCanonicalServiceName();
- if (seenServiceNames.contains(serviceName)) {
+ if (serviceName == null || seenServiceNames.contains(serviceName)) {
continue;
}
seenServiceNames.add(serviceName);
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java Mon Jan 16 04:24:24 2012
@@ -1193,6 +1193,13 @@ public class SequenceFile {
}
}
+ /** flush all currently written data to the file system */
+ public void syncFs() throws IOException {
+ if (out != null) {
+ out.sync(); // flush contents to file system
+ }
+ }
+
/** Returns the configuration of this file. */
Configuration getConf() { return conf; }
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java Mon Jan 16 04:24:24 2012
@@ -51,12 +51,6 @@ import org.apache.hadoop.ipc.VersionedPr
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.util.ReflectionUtils;
-import com.google.common.annotations.VisibleForTesting;
-
-//this will need to be replaced someday when there is a suitable replacement
-import sun.net.dns.ResolverConfiguration;
-import sun.net.util.IPAddressUtil;
-
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Unstable
public class NetUtils {
@@ -72,26 +66,6 @@ public class NetUtils {
/** Base URL of the Hadoop Wiki: {@value} */
public static final String HADOOP_WIKI = "http://wiki.apache.org/hadoop/";
- private static HostResolver hostResolver;
-
- static {
- // SecurityUtils requires a more secure host resolver if tokens are
- // using hostnames
- setUseQualifiedHostResolver(!SecurityUtil.getTokenServiceUseIp());
- }
-
- /**
- * This method is intended for use only by SecurityUtils!
- * @param flag where the qualified or standard host resolver is used
- * to create socket addresses
- */
- @InterfaceAudience.Private
- public static void setUseQualifiedHostResolver(boolean flag) {
- hostResolver = flag
- ? new QualifiedHostResolver()
- : new StandardHostResolver();
- }
-
/**
* Get the socket factory for the given class according to its
* configuration parameter
@@ -249,7 +223,7 @@ public class NetUtils {
InetSocketAddress addr;
try {
- InetAddress iaddr = hostResolver.getByName(resolveHost);
+ InetAddress iaddr = SecurityUtil.getByName(resolveHost);
// if there is a static entry for the host, make the returned
// address look like the original given host
if (staticHost != null) {
@@ -261,151 +235,6 @@ public class NetUtils {
}
return addr;
}
-
- interface HostResolver {
- InetAddress getByName(String host) throws UnknownHostException;
- }
-
- /**
- * Uses standard java host resolution
- */
- static class StandardHostResolver implements HostResolver {
- public InetAddress getByName(String host) throws UnknownHostException {
- return InetAddress.getByName(host);
- }
- }
-
- /**
- * This an alternate resolver with important properties that the standard
- * java resolver lacks:
- * 1) The hostname is fully qualified. This avoids security issues if not
- * all hosts in the cluster do not share the same search domains. It
- * also prevents other hosts from performing unnecessary dns searches.
- * In contrast, InetAddress simply returns the host as given.
- * 2) The InetAddress is instantiated with an exact host and IP to prevent
- * further unnecessary lookups. InetAddress may perform an unnecessary
- * reverse lookup for an IP.
- * 3) A call to getHostName() will always return the qualified hostname, or
- * more importantly, the IP if instantiated with an IP. This avoids
- * unnecessary dns timeouts if the host is not resolvable.
- * 4) Point 3 also ensures that if the host is re-resolved, ex. during a
- * connection re-attempt, that a reverse lookup to host and forward
- * lookup to IP is not performed since the reverse/forward mappings may
- * not always return the same IP. If the client initiated a connection
- * with an IP, then that IP is all that should ever be contacted.
- *
- * NOTE: this resolver is only used if:
- * hadoop.security.token.service.use_ip=false
- */
- protected static class QualifiedHostResolver implements HostResolver {
- @SuppressWarnings("unchecked")
- private List<String> searchDomains =
- ResolverConfiguration.open().searchlist();
-
- /**
- * Create an InetAddress with a fully qualified hostname of the given
- * hostname. InetAddress does not qualify an incomplete hostname that
- * is resolved via the domain search list.
- * {@link InetAddress#getCanonicalHostName()} will fully qualify the
- * hostname, but it always return the A record whereas the given hostname
- * may be a CNAME.
- *
- * @param host a hostname or ip address
- * @return InetAddress with the fully qualified hostname or ip
- * @throws UnknownHostException if host does not exist
- */
- public InetAddress getByName(String host) throws UnknownHostException {
- InetAddress addr = null;
-
- if (IPAddressUtil.isIPv4LiteralAddress(host)) {
- // use ipv4 address as-is
- byte[] ip = IPAddressUtil.textToNumericFormatV4(host);
- addr = InetAddress.getByAddress(host, ip);
- } else if (IPAddressUtil.isIPv6LiteralAddress(host)) {
- // use ipv6 address as-is
- byte[] ip = IPAddressUtil.textToNumericFormatV6(host);
- addr = InetAddress.getByAddress(host, ip);
- } else if (host.endsWith(".")) {
- // a rooted host ends with a dot, ex. "host."
- // rooted hosts never use the search path, so only try an exact lookup
- addr = getByExactName(host);
- } else if (host.contains(".")) {
- // the host contains a dot (domain), ex. "host.domain"
- // try an exact host lookup, then fallback to search list
- addr = getByExactName(host);
- if (addr == null) {
- addr = getByNameWithSearch(host);
- }
- } else {
- // it's a simple host with no dots, ex. "host"
- // try the search list, then fallback to exact host
- InetAddress loopback = InetAddress.getByName(null);
- if (host.equalsIgnoreCase(loopback.getHostName())) {
- addr = InetAddress.getByAddress(host, loopback.getAddress());
- } else {
- addr = getByNameWithSearch(host);
- if (addr == null) {
- addr = getByExactName(host);
- }
- }
- }
- // unresolvable!
- if (addr == null) {
- throw new UnknownHostException(host);
- }
- return addr;
- }
-
- InetAddress getByExactName(String host) {
- InetAddress addr = null;
- // InetAddress will use the search list unless the host is rooted
- // with a trailing dot. The trailing dot will disable any use of the
- // search path in a lower level resolver. See RFC 1535.
- String fqHost = host;
- if (!fqHost.endsWith(".")) fqHost += ".";
- try {
- addr = getInetAddressByName(fqHost);
- // can't leave the hostname as rooted or other parts of the system
- // malfunction, ex. kerberos principals are lacking proper host
- // equivalence for rooted/non-rooted hostnames
- addr = InetAddress.getByAddress(host, addr.getAddress());
- } catch (UnknownHostException e) {
- // ignore, caller will throw if necessary
- }
- return addr;
- }
-
- InetAddress getByNameWithSearch(String host) {
- InetAddress addr = null;
- if (host.endsWith(".")) { // already qualified?
- addr = getByExactName(host);
- } else {
- for (String domain : searchDomains) {
- String dot = !domain.startsWith(".") ? "." : "";
- addr = getByExactName(host + dot + domain);
- if (addr != null) break;
- }
- }
- return addr;
- }
-
- // implemented as a separate method to facilitate unit testing
- InetAddress getInetAddressByName(String host) throws UnknownHostException {
- return InetAddress.getByName(host);
- }
-
- void setSearchDomains(String ... domains) {
- searchDomains = Arrays.asList(domains);
- }
- }
-
- /**
- * This is for testing only!
- */
- @VisibleForTesting
- static void setHostResolver(HostResolver newResolver) {
- hostResolver = newResolver;
- }
/**
* Resolve the uri's hostname and add the default port if not in the uri
@@ -447,7 +276,7 @@ public class NetUtils {
String fqHost = canonicalizedHostCache.get(host);
if (fqHost == null) {
try {
- fqHost = hostResolver.getByName(host).getHostName();
+ fqHost = SecurityUtil.getByName(host).getHostName();
// slight race condition, but won't hurt
canonicalizedHostCache.put(host, fqHost);
} catch (UnknownHostException e) {
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java Mon Jan 16 04:24:24 2012
@@ -23,6 +23,8 @@ import java.net.URI;
import java.net.URL;
import java.net.UnknownHostException;
import java.security.AccessController;
+import java.util.Arrays;
+import java.util.List;
import java.util.ServiceLoader;
import java.util.Set;
@@ -41,6 +43,11 @@ import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenInfo;
+import com.google.common.annotations.VisibleForTesting;
+
+//this will need to be replaced someday when there is a suitable replacement
+import sun.net.dns.ResolverConfiguration;
+import sun.net.util.IPAddressUtil;
import sun.security.jgss.krb5.Krb5Util;
import sun.security.krb5.Credentials;
import sun.security.krb5.PrincipalName;
@@ -53,7 +60,10 @@ public class SecurityUtil {
// controls whether buildTokenService will use an ip or host/ip as given
// by the user
- private static boolean useIpForTokenService;
+ @VisibleForTesting
+ static boolean useIpForTokenService;
+ @VisibleForTesting
+ static HostResolver hostResolver;
static {
boolean useIp = new Configuration().getBoolean(
@@ -68,16 +78,9 @@ public class SecurityUtil {
@InterfaceAudience.Private
static void setTokenServiceUseIp(boolean flag) {
useIpForTokenService = flag;
- NetUtils.setUseQualifiedHostResolver(!flag);
- }
-
- /**
- * Intended only for temporary use by NetUtils. Do not use.
- * @return whether tokens use an IP address
- */
- @InterfaceAudience.Private
- public static boolean getTokenServiceUseIp() {
- return useIpForTokenService;
+ hostResolver = !useIpForTokenService
+ ? new QualifiedHostResolver()
+ : new StandardHostResolver();
}
/**
@@ -142,7 +145,7 @@ public class SecurityUtil {
* it will be removed when the Java behavior is changed.
*
* @param remoteHost Target URL the krb-https client will access
- * @throws IOException
+ * @throws IOException if the service ticket cannot be retrieved
*/
public static void fetchServiceTicket(URL remoteHost) throws IOException {
if(!UserGroupInformation.isSecurityEnabled())
@@ -179,7 +182,7 @@ public class SecurityUtil {
* @param hostname
* the fully-qualified domain name used for substitution
* @return converted Kerberos principal name
- * @throws IOException
+ * @throws IOException if the client address cannot be determined
*/
public static String getServerPrincipal(String principalConfig,
String hostname) throws IOException {
@@ -204,7 +207,7 @@ public class SecurityUtil {
* @param addr
* InetAddress of the host used for substitution
* @return converted Kerberos principal name
- * @throws IOException
+ * @throws IOException if the client address cannot be determined
*/
public static String getServerPrincipal(String principalConfig,
InetAddress addr) throws IOException {
@@ -251,7 +254,7 @@ public class SecurityUtil {
* the key to look for keytab file in conf
* @param userNameKey
* the key to look for user's Kerberos principal name in conf
- * @throws IOException
+ * @throws IOException if login fails
*/
public static void login(final Configuration conf,
final String keytabFileKey, final String userNameKey) throws IOException {
@@ -271,7 +274,7 @@ public class SecurityUtil {
* the key to look for user's Kerberos principal name in conf
* @param hostname
* hostname to use for substitution
- * @throws IOException
+ * @throws IOException if the config doesn't specify a keytab
*/
public static void login(final Configuration conf,
final String keytabFileKey, final String userNameKey, String hostname)
@@ -363,7 +366,7 @@ public class SecurityUtil {
* Look up the TokenInfo for a given protocol. It searches all known
* SecurityInfo providers.
* @param protocol The protocol class to get the information for.
- * @conf conf Configuration object
+ * @param conf Configuration object
* @return the TokenInfo or null if it has no KerberosInfo defined
*/
public static TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
@@ -442,4 +445,155 @@ public class SecurityUtil {
public static Text buildTokenService(URI uri) {
return buildTokenService(NetUtils.createSocketAddr(uri.getAuthority()));
}
+
+ /**
+ * Resolves a host subject to the security requirements determined by
+ * hadoop.security.token.service.use_ip.
+ *
+ * @param hostname host or ip to resolve
+ * @return a resolved host
+ * @throws UnknownHostException if the host doesn't exist
+ */
+ @InterfaceAudience.Private
+ public static
+ InetAddress getByName(String hostname) throws UnknownHostException {
+ return hostResolver.getByName(hostname);
+ }
+
+ interface HostResolver {
+ InetAddress getByName(String host) throws UnknownHostException;
+ }
+
+ /**
+ * Uses standard java host resolution
+ */
+ static class StandardHostResolver implements HostResolver {
+ public InetAddress getByName(String host) throws UnknownHostException {
+ return InetAddress.getByName(host);
+ }
+ }
+
+ /**
+ * This is an alternate resolver with important properties that the standard
+ * java resolver lacks:
+ * 1) The hostname is fully qualified. This avoids security issues if
+ * not all hosts in the cluster share the same search domains. It
+ * also prevents other hosts from performing unnecessary dns searches.
+ * In contrast, InetAddress simply returns the host as given.
+ * 2) The InetAddress is instantiated with an exact host and IP to prevent
+ * further unnecessary lookups. InetAddress may perform an unnecessary
+ * reverse lookup for an IP.
+ * 3) A call to getHostName() will always return the qualified hostname, or
+ * more importantly, the IP if instantiated with an IP. This avoids
+ * unnecessary dns timeouts if the host is not resolvable.
+ * 4) Point 3 also ensures that if the host is re-resolved, ex. during a
+ * connection re-attempt, that a reverse lookup to host and forward
+ * lookup to IP is not performed since the reverse/forward mappings may
+ * not always return the same IP. If the client initiated a connection
+ * with an IP, then that IP is all that should ever be contacted.
+ *
+ * NOTE: this resolver is only used if:
+ * hadoop.security.token.service.use_ip=false
+ */
+ protected static class QualifiedHostResolver implements HostResolver {
+ @SuppressWarnings("unchecked")
+ private List<String> searchDomains =
+ ResolverConfiguration.open().searchlist();
+
+ /**
+ * Create an InetAddress with a fully qualified hostname of the given
+ * hostname. InetAddress does not qualify an incomplete hostname that
+ * is resolved via the domain search list.
+ * {@link InetAddress#getCanonicalHostName()} will fully qualify the
+ * hostname, but it always returns the A record whereas the given hostname
+ * may be a CNAME.
+ *
+ * @param host a hostname or ip address
+ * @return InetAddress with the fully qualified hostname or ip
+ * @throws UnknownHostException if host does not exist
+ */
+ public InetAddress getByName(String host) throws UnknownHostException {
+ InetAddress addr = null;
+
+ if (IPAddressUtil.isIPv4LiteralAddress(host)) {
+ // use ipv4 address as-is
+ byte[] ip = IPAddressUtil.textToNumericFormatV4(host);
+ addr = InetAddress.getByAddress(host, ip);
+ } else if (IPAddressUtil.isIPv6LiteralAddress(host)) {
+ // use ipv6 address as-is
+ byte[] ip = IPAddressUtil.textToNumericFormatV6(host);
+ addr = InetAddress.getByAddress(host, ip);
+ } else if (host.endsWith(".")) {
+ // a rooted host ends with a dot, ex. "host."
+ // rooted hosts never use the search path, so only try an exact lookup
+ addr = getByExactName(host);
+ } else if (host.contains(".")) {
+ // the host contains a dot (domain), ex. "host.domain"
+ // try an exact host lookup, then fallback to search list
+ addr = getByExactName(host);
+ if (addr == null) {
+ addr = getByNameWithSearch(host);
+ }
+ } else {
+ // it's a simple host with no dots, ex. "host"
+ // try the search list, then fallback to exact host
+ InetAddress loopback = InetAddress.getByName(null);
+ if (host.equalsIgnoreCase(loopback.getHostName())) {
+ addr = InetAddress.getByAddress(host, loopback.getAddress());
+ } else {
+ addr = getByNameWithSearch(host);
+ if (addr == null) {
+ addr = getByExactName(host);
+ }
+ }
+ }
+ // unresolvable!
+ if (addr == null) {
+ throw new UnknownHostException(host);
+ }
+ return addr;
+ }
+
+ InetAddress getByExactName(String host) {
+ InetAddress addr = null;
+ // InetAddress will use the search list unless the host is rooted
+ // with a trailing dot. The trailing dot will disable any use of the
+ // search path in a lower level resolver. See RFC 1535.
+ String fqHost = host;
+ if (!fqHost.endsWith(".")) fqHost += ".";
+ try {
+ addr = getInetAddressByName(fqHost);
+ // can't leave the hostname as rooted or other parts of the system
+ // malfunction, ex. kerberos principals are lacking proper host
+ // equivalence for rooted/non-rooted hostnames
+ addr = InetAddress.getByAddress(host, addr.getAddress());
+ } catch (UnknownHostException e) {
+ // ignore, caller will throw if necessary
+ }
+ return addr;
+ }
+
+ InetAddress getByNameWithSearch(String host) {
+ InetAddress addr = null;
+ if (host.endsWith(".")) { // already qualified?
+ addr = getByExactName(host);
+ } else {
+ for (String domain : searchDomains) {
+ String dot = !domain.startsWith(".") ? "." : "";
+ addr = getByExactName(host + dot + domain);
+ if (addr != null) break;
+ }
+ }
+ return addr;
+ }
+
+ // implemented as a separate method to facilitate unit testing
+ InetAddress getInetAddressByName(String host) throws UnknownHostException {
+ return InetAddress.getByName(host);
+ }
+
+ void setSearchDomains(String ... domains) {
+ searchDomains = Arrays.asList(domains);
+ }
+ }
}
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java Mon Jan 16 04:24:24 2012
@@ -661,7 +661,14 @@ public class StringUtils {
if (Character.isDigit(lastchar))
return Long.parseLong(s);
else {
- long prefix = TraditionalBinaryPrefix.valueOf(lastchar).value;
+ long prefix;
+ try {
+ prefix = TraditionalBinaryPrefix.valueOf(lastchar).value;
+ } catch (IllegalArgumentException e) {
+ throw new IllegalArgumentException("Invalid size prefix '" + lastchar
+ + "' in '" + s
+ + "'. Allowed prefixes are k, m, g, t, p, e(case insensitive)");
+ }
long num = Long.parseLong(s.substring(0, lastpos));
if (num > (Long.MAX_VALUE/prefix) || num < (Long.MIN_VALUE/prefix)) {
throw new IllegalArgumentException(s + " does not fit in a Long");
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Mon Jan 16 04:24:24 2012
@@ -175,7 +175,7 @@
<property>
<name>io.compression.codecs</name>
- <value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
+ <value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec,org.apache.hadoop.io.compress.SnappyCodec,org.apache.hadoop.io.compress.Lz4Codec</value>
<description>A list of the compression codec classes that can be used
for compression/decompression.</description>
</property>
Propchange: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Jan 16 04:24:24 2012
@@ -1,3 +1,4 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1161777,1161781,1162008,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1164255,1164301,1164339,1166009,1166402,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1177487,1177531,1177859,1177864,1183132,1189932,1189982,1195575,1196113,1196129,1204114,1204117,1204122,1204124,1204129,1204131,1204370,1204376,1204388,1205260,1206786,1206830,1207694,1208153,1208313,1212021,1212062,1212073,1212084,1213537,1213586,1213592-1213593,1213954,1214046,1220510,1221348,1226211,1227091,1227423
+/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/core:1227776-1231827
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1161777,1161781,1162008,1162188,1162421,1162491,1162499,1162613,1162928,1162954,1162979,1163050,1163069,1163456,1163465,1163490,1163768,1163852,1163858,1164255,1164301,1164339,1166009,1166402,1167383,1170379,1170459,1171297,1172916,1173402,1176550,1177487,1177531,1177859,1177864,1182205,1183132,1189932,1189982,1195575,1196113,1196129,1204114,1204117,1204122,1204124,1204129,1204131,1204177,1204370,1204376,1204388,1205260,1205697,1206786,1206830,1207694,1208153,1208313,1212021,1212062,1212073,1212084,1213537,1213586,1213592-1213593,1213954,1214046,1220510,1221348,1225192,1225456,1225489,1225591,1226211,1226239,1227091,1227165,1227423,1229347,1230398,1231569,1231572,1231627,1231640
/hadoop/core/branches/branch-0.19/core/src/test/core:713112
/hadoop/core/trunk/src/test/core:776175-785643,785929-786278
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java Mon Jan 16 04:24:24 2012
@@ -405,12 +405,16 @@ public class TestConfiguration extends T
conf.addResource(fileResource);
assertEquals(20, conf.getInt("test.int1", 0));
assertEquals(20, conf.getLong("test.int1", 0));
+ assertEquals(20, conf.getLongBytes("test.int1", 0));
assertEquals(20, conf.getInt("test.int2", 0));
assertEquals(20, conf.getLong("test.int2", 0));
+ assertEquals(20, conf.getLongBytes("test.int2", 0));
assertEquals(-20, conf.getInt("test.int3", 0));
assertEquals(-20, conf.getLong("test.int3", 0));
+ assertEquals(-20, conf.getLongBytes("test.int3", 0));
assertEquals(-20, conf.getInt("test.int4", 0));
assertEquals(-20, conf.getLong("test.int4", 0));
+ assertEquals(-20, conf.getLongBytes("test.int4", 0));
try {
conf.getInt("test.int5", 0);
fail("Property had invalid int value, but was read successfully.");
@@ -419,6 +423,26 @@ public class TestConfiguration extends T
}
}
+ public void testHumanReadableValues() throws IOException {
+ out = new BufferedWriter(new FileWriter(CONFIG));
+ startConfig();
+ appendProperty("test.humanReadableValue1", "1m");
+ appendProperty("test.humanReadableValue2", "1M");
+ appendProperty("test.humanReadableValue5", "1MBCDE");
+
+ endConfig();
+ Path fileResource = new Path(CONFIG);
+ conf.addResource(fileResource);
+ assertEquals(1048576, conf.getLongBytes("test.humanReadableValue1", 0));
+ assertEquals(1048576, conf.getLongBytes("test.humanReadableValue2", 0));
+ try {
+ conf.getLongBytes("test.humanReadableValue5", 0);
+ fail("Property had invalid human readable value, but was read successfully.");
+ } catch (NumberFormatException e) {
+ // pass
+ }
+ }
+
public void testBooleanValues() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java Mon Jan 16 04:24:24 2012
@@ -25,7 +25,7 @@ import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.net.NetUtilsTestResolver;
+import org.apache.hadoop.security.NetUtilsTestResolver;
import org.apache.hadoop.util.Progressable;
import org.junit.Test;
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java Mon Jan 16 04:24:24 2012
@@ -78,17 +78,16 @@ public class TestViewFsTrash {
// set up viewfs's home dir root to point to home dir root on target
// But home dir is different on linux, mac etc.
// Figure it out by calling home dir on target
-
- String homeDir = fsTarget.getHomeDirectory().toUri().getPath();
- int indexOf2ndSlash = homeDir.indexOf('/', 1);
- String homeDirRoot = homeDir.substring(0, indexOf2ndSlash);
- ConfigUtil.addLink(conf, homeDirRoot,
- fsTarget.makeQualified(new Path(homeDirRoot)).toUri());
- ConfigUtil.setHomeDirConf(conf, homeDirRoot);
- Log.info("Home dir base " + homeDirRoot);
-
+
+ String homeDirRoot = fsTarget.getHomeDirectory()
+ .getParent().toUri().getPath();
+ ConfigUtil.addLink(conf, homeDirRoot,
+ fsTarget.makeQualified(new Path(homeDirRoot)).toUri());
+ ConfigUtil.setHomeDirConf(conf, homeDirRoot);
+ Log.info("Home dir base " + homeDirRoot);
+
fsView = ViewFileSystemTestSetup.setupForViewFs(conf, fsTarget);
-
+
// set working dir so that relative paths
//fsView.setWorkingDirectory(new Path(fsTarget.getWorkingDirectory().toUri().getPath()));
conf.set("fs.defaultFS", FsConstants.VIEWFS_URI.toString());
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java Mon Jan 16 04:24:24 2012
@@ -37,6 +37,7 @@ import org.apache.commons.lang.StringUti
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.NetUtilsTestResolver;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java Mon Jan 16 04:24:24 2012
@@ -225,7 +225,7 @@ public class TestSecurityUtil {
assertTrue(!addr.isUnresolved());
// don't know what the standard resolver will return for hostname.
// should be host for host; host or ip for ip is ambiguous
- if (!SecurityUtil.getTokenServiceUseIp()) {
+ if (!SecurityUtil.useIpForTokenService) {
assertEquals(host, addr.getHostName());
assertEquals(host, addr.getAddress().getHostName());
}
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java Mon Jan 16 04:24:24 2012
@@ -143,8 +143,62 @@ public class TestStringUtils extends Uni
}
assertEquals(0L, StringUtils.TraditionalBinaryPrefix.string2long("0"));
- assertEquals(-1259520L, StringUtils.TraditionalBinaryPrefix.string2long("-1230k"));
- assertEquals(956703965184L, StringUtils.TraditionalBinaryPrefix.string2long("891g"));
+ assertEquals(1024L, StringUtils.TraditionalBinaryPrefix.string2long("1k"));
+ assertEquals(-1024L, StringUtils.TraditionalBinaryPrefix.string2long("-1k"));
+ assertEquals(1259520L,
+ StringUtils.TraditionalBinaryPrefix.string2long("1230K"));
+ assertEquals(-1259520L,
+ StringUtils.TraditionalBinaryPrefix.string2long("-1230K"));
+ assertEquals(104857600L,
+ StringUtils.TraditionalBinaryPrefix.string2long("100m"));
+ assertEquals(-104857600L,
+ StringUtils.TraditionalBinaryPrefix.string2long("-100M"));
+ assertEquals(956703965184L,
+ StringUtils.TraditionalBinaryPrefix.string2long("891g"));
+ assertEquals(-956703965184L,
+ StringUtils.TraditionalBinaryPrefix.string2long("-891G"));
+ assertEquals(501377302265856L,
+ StringUtils.TraditionalBinaryPrefix.string2long("456t"));
+ assertEquals(-501377302265856L,
+ StringUtils.TraditionalBinaryPrefix.string2long("-456T"));
+ assertEquals(11258999068426240L,
+ StringUtils.TraditionalBinaryPrefix.string2long("10p"));
+ assertEquals(-11258999068426240L,
+ StringUtils.TraditionalBinaryPrefix.string2long("-10P"));
+ assertEquals(1152921504606846976L,
+ StringUtils.TraditionalBinaryPrefix.string2long("1e"));
+ assertEquals(-1152921504606846976L,
+ StringUtils.TraditionalBinaryPrefix.string2long("-1E"));
+
+ String tooLargeNumStr = "10e";
+ try {
+ StringUtils.TraditionalBinaryPrefix.string2long(tooLargeNumStr);
+ fail("Test passed for a number " + tooLargeNumStr + " too large");
+ } catch (IllegalArgumentException e) {
+ assertEquals(tooLargeNumStr + " does not fit in a Long", e.getMessage());
+ }
+
+ String tooSmallNumStr = "-10e";
+ try {
+ StringUtils.TraditionalBinaryPrefix.string2long(tooSmallNumStr);
+ fail("Test passed for a number " + tooSmallNumStr + " too small");
+ } catch (IllegalArgumentException e) {
+ assertEquals(tooSmallNumStr + " does not fit in a Long", e.getMessage());
+ }
+
+ String invalidFormatNumStr = "10kb";
+ char invalidPrefix = 'b';
+ try {
+ StringUtils.TraditionalBinaryPrefix.string2long(invalidFormatNumStr);
+ fail("Test passed for a number " + invalidFormatNumStr
+ + " has invalid format");
+ } catch (IllegalArgumentException e) {
+ assertEquals("Invalid size prefix '" + invalidPrefix + "' in '"
+ + invalidFormatNumStr
+ + "'. Allowed prefixes are k, m, g, t, p, e(case insensitive)",
+ e.getMessage());
+ }
+
}
@Test
Modified: hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml?rev=1231834&r1=1231833&r2=1231834&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml (original)
+++ hadoop/common/branches/branch-0.23-PB/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml Mon Jan 16 04:24:24 2012
@@ -449,7 +449,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
- <expected-output>^-getmerge <src> <localdst> \[addnl\]:( |\t)*Get all the files in the directories that( )*</expected-output>
+ <expected-output>^-getmerge \[-nl\] <src> <localdst>:( |\t)*Get all the files in the directories that( )*</expected-output>
</comparator>
<comparator>
<type>RegexpComparator</type>
@@ -459,6 +459,10 @@
<type>RegexpComparator</type>
<expected-output>^( |\t)*one file on local fs. <src> is kept.( )*</expected-output>
</comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^( |\t)*-nl Add a newline character at the end of each file.( )*</expected-output>
+ </comparator>
</comparators>
</test>