You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ac...@apache.org on 2011/05/03 17:13:58 UTC
svn commit: r1099090 - in /hadoop/common/branches/branch-0.20-security: ./
src/core/org/apache/hadoop/fs/ src/core/org/apache/hadoop/http/ src/mapred/
src/mapred/org/apache/hadoop/mapred/lib/ src/test/org/apache/hadoop/fs/
src/test/org/apache/hadoop/ma...
Author: acmurthy
Date: Tue May 3 15:13:58 2011
New Revision: 1099090
URL: http://svn.apache.org/viewvc?rev=1099090&view=rev
Log:
Merge -r 1099087:1099088 from branch-0.20-security-203 to fix HADOOP-5759.
Modified:
hadoop/common/branches/branch-0.20-security/ (props changed)
hadoop/common/branches/branch-0.20-security/CHANGES.txt (contents, props changed)
hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/fs/HarFileSystem.java (props changed)
hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/http/HttpServer.java
hadoop/common/branches/branch-0.20-security/src/mapred/ (props changed)
hadoop/common/branches/branch-0.20-security/src/mapred/org/apache/hadoop/mapred/lib/CombineFileInputFormat.java
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/fs/TestHarFileSystem.java (props changed)
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/mapred/lib/TestCombineFileInputFormat.java
hadoop/common/branches/branch-0.20-security/src/tools/org/apache/hadoop/tools/HadoopArchives.java (props changed)
Propchange: hadoop/common/branches/branch-0.20-security/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue May 3 15:13:58 2011
@@ -1,4 +1,4 @@
/hadoop/common/branches/branch-0.20:826138,826568,829987,831184,833001,880632,898713,909245,909723,960946
-/hadoop/common/branches/branch-0.20-security-203:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863
+/hadoop/common/branches/branch-0.20-security-203:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088
/hadoop/core/branches/branch-0.19:713112
/hadoop/core/trunk:727001,727117,727191,727212,727217,727228,727255,727869,728187,729052,729987,732385,732572,732613,732777,732838,732869,733887,734870,734916,736426,738328,738697,740077,740157,741703,741762,743745,743816,743892,744894,745180,746010,746206,746227,746233,746274,746338,746902-746903,746925,746944,746968,746970,747279,747289,747802,748084,748090,748783,749262,749318,749863,750533,752073,752609,752834,752836,752913,752932,753112-753113,753346,754645,754847,754927,755035,755226,755348,755370,755418,755426,755790,755905,755938,755960,755986,755998,756352,757448,757624,757849,758156,758180,759398,759932,760502,760783,761046,761482,761632,762216,762879,763107,763502,764967,765016,765809,765951,771607,771661,772844,772876,772884,772920,773889,776638,778962,778966,779893,781720,784661,785046,785569
Modified: hadoop/common/branches/branch-0.20-security/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/CHANGES.txt?rev=1099090&r1=1099089&r2=1099090&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.20-security/CHANGES.txt Tue May 3 15:13:58 2011
@@ -1786,8 +1786,8 @@ Release 0.20.2 - Unreleased
HADOOP-6269. Fix threading issue with defaultResource in Configuration.
(Sreekanth Ramakrishnan via cdouglas)
- HADOOP-6386. NameNode's HttpServer can't instantiate InetSocketAddress:
- IllegalArgumentException is thrown. (cos)
+ HADOOP-5759. Fix for IllegalArgumentException when CombineFileInputFormat
+ is used as job InputFormat. (Amareshwari Sriramadasu via zshao)
Release 0.20.1 - 2009-09-01
Propchange: hadoop/common/branches/branch-0.20-security/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue May 3 15:13:58 2011
@@ -1,5 +1,5 @@
/hadoop/common/branches/branch-0.20/CHANGES.txt:826138,826568,829987,831184,833001,880632,898713,909245,909723,960946
-/hadoop/common/branches/branch-0.20-security-203/CHANGES.txt:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863
+/hadoop/common/branches/branch-0.20-security-203/CHANGES.txt:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088
/hadoop/core/branches/branch-0.18/CHANGES.txt:727226
/hadoop/core/branches/branch-0.19/CHANGES.txt:713112
/hadoop/core/trunk/CHANGES.txt:727001,727117,727191,727212,727228,727255,727869,728187,729052,729987,732385,732572,732613,732777,732838,732869,733887,734870,734916,735082,736426,738602,738697,739416,740077,740157,741703,741762,743296,743745,743816,743892,744894,745180,745268,746010,746193,746206,746227,746233,746274,746902-746903,746925,746944,746968,746970,747279,747289,747802,748084,748090,748783,749262,749318,749863,750533,752073,752514,752555,752590,752609,752834,752836,752913,752932,753112-753113,753346,754645,754847,754927,755035,755226,755348,755370,755418,755426,755790,755905,755938,755986,755998,756352,757448,757624,757849,758156,758180,759398,759932,760502,760783,761046,761482,761632,762216,762879,763107,763502,764967,765016,765809,765951,771607,772844,772876,772884,772920,773889,776638,778962,778966,779893,781720,784661,785046,785569
Propchange: hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/fs/HarFileSystem.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue May 3 15:13:58 2011
@@ -1,4 +1,4 @@
-/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/fs/HarFileSystem.java:1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863
+/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/fs/HarFileSystem.java:1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088
/hadoop/common/trunk/src/core/org/apache/hadoop/fs/HarFileSystem.java:910709
/hadoop/common/trunk/src/java/org/apache/hadoop/fs/HarFileSystem.java:979485
/hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/HarFileSystem.java:713112
Modified: hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/http/HttpServer.java?rev=1099090&r1=1099089&r2=1099090&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/http/HttpServer.java Tue May 3 15:13:58 2011
@@ -596,33 +596,6 @@ public class HttpServer implements Filte
} //Workaround end
LOG.info("Jetty bound to port " + port);
webServer.start();
- // Workaround for HADOOP-6386
- if (port < 0) {
- Random r = new Random(1000);
- for (int i = 0; i < MAX_RETRIES/2; i++) {
- try {
- webServer.stop();
- } catch (Exception e) {
- LOG.warn("Can't stop web-server", e);
- }
- LOG.info("Bouncing the listener");
- listener.close();
- Thread.sleep(r.nextInt());
- listener.setPort(oriPort == 0 ? 0 : (oriPort += 1));
- listener.open();
- Thread.sleep(100);
-
- webServer.start();
- Thread.sleep(r.nextInt());
- port = listener.getLocalPort();
- if (port > 0)
- break;
- }
- if (port < 0)
- throw new BindException("listener.getLocalPort() is returning " +
- "less than 0 even after " +MAX_RETRIES+" resets");
- }
- // End of HADOOP-6386 workaround
break;
} catch (IOException ex) {
// if this is a bind exception,
Propchange: hadoop/common/branches/branch-0.20-security/src/mapred/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue May 3 15:13:58 2011
@@ -1,5 +1,5 @@
/hadoop/common/branches/branch-0.20/src/mapred:826138,826568,829987,831184,833001,880632,898713,909245,909723,960946
-/hadoop/common/branches/branch-0.20-security-203/src/mapred:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863
+/hadoop/common/branches/branch-0.20-security-203/src/mapred:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088
/hadoop/core/branches/branch-0.19/src/mapred:713112
/hadoop/core/trunk/src/mapred:727001,727117,727191,727212,727217,727228,727255,727869,728187,729052,729987,732385,732572,732613,732777,732838,732869,733887,734870,734916,736426,738328,738697,740077,740157,741703,741762,743745,743816,743892,744894,745180,746010,746206,746227,746233,746274,746338,746902-746903,746925,746944,746968,746970,747279,747289,747802,748084,748090,748783,749262,749318,749863,750533,752073,752609,752834,752836,752913,752932,753112-753113,753346,754645,754847,754927,755035,755226,755348,755370,755418,755426,755790,755905,755938,755960,755986,755998,756352,757448,757624,757849,758156,758180,759398,759932,760502,760783,761046,761482,761632,762216,762879,763107,763502,764967,765016,765809,765951,771607,771661,772844,772876,772884,772920,773889,776638,778962,778966,779893,781720,784661,785046,785569
/hadoop/mapreduce/trunk/src/java:808650
Modified: hadoop/common/branches/branch-0.20-security/src/mapred/org/apache/hadoop/mapred/lib/CombineFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/mapred/org/apache/hadoop/mapred/lib/CombineFileInputFormat.java?rev=1099090&r1=1099089&r2=1099090&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/mapred/org/apache/hadoop/mapred/lib/CombineFileInputFormat.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/mapred/org/apache/hadoop/mapred/lib/CombineFileInputFormat.java Tue May 3 15:13:58 2011
@@ -20,12 +20,12 @@ package org.apache.hadoop.mapred.lib;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.HashSet;
import java.util.List;
import java.util.HashMap;
import java.util.Set;
import java.util.Iterator;
import java.util.Map;
-import java.util.Map.Entry;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
@@ -73,6 +73,9 @@ public abstract class CombineFileInputFo
// across multiple pools.
private ArrayList<MultiPathFilter> pools = new ArrayList<MultiPathFilter>();
+ // mapping from a rack name to the set of Nodes in the rack
+ private static HashMap<String, Set<String>> rackToNodes =
+ new HashMap<String, Set<String>>();
/**
* Specify the maximum size (in bytes) of each split. Each split is
* approximately equal to the specified size.
@@ -214,6 +217,8 @@ public abstract class CombineFileInputFo
getMoreSplits(job, myPaths.toArray(new Path[myPaths.size()]),
maxSize, minSizeNode, minSizeRack, splits);
+ // free up rackToNodes map
+ rackToNodes.clear();
return splits.toArray(new CombineFileSplit[splits.size()]);
}
@@ -341,7 +346,7 @@ public abstract class CombineFileInputFo
// create this split.
if (maxSize != 0 && curSplitSize >= maxSize) {
// create an input split and add it to the splits array
- addCreatedSplit(job, splits, racks, validBlocks);
+ addCreatedSplit(job, splits, getHosts(racks), validBlocks);
createdSplit = true;
break;
}
@@ -360,7 +365,7 @@ public abstract class CombineFileInputFo
if (minSizeRack != 0 && curSplitSize >= minSizeRack) {
// if there is a minimum size specified, then create a single split
// otherwise, store these blocks into overflow data structure
- addCreatedSplit(job, splits, racks, validBlocks);
+ addCreatedSplit(job, splits, getHosts(racks), validBlocks);
} else {
// There were a few blocks in this rack that remained to be processed.
// Keep them in 'overflow' block list. These will be combined later.
@@ -393,7 +398,7 @@ public abstract class CombineFileInputFo
// create this split.
if (maxSize != 0 && curSplitSize >= maxSize) {
// create an input split and add it to the splits array
- addCreatedSplit(job, splits, racks, validBlocks);
+ addCreatedSplit(job, splits, getHosts(racks), validBlocks);
curSplitSize = 0;
validBlocks.clear();
racks.clear();
@@ -402,7 +407,7 @@ public abstract class CombineFileInputFo
// Process any remaining blocks, if any.
if (!validBlocks.isEmpty()) {
- addCreatedSplit(job, splits, racks, validBlocks);
+ addCreatedSplit(job, splits, getHosts(racks), validBlocks);
}
}
@@ -412,13 +417,12 @@ public abstract class CombineFileInputFo
*/
private void addCreatedSplit(JobConf job,
List<CombineFileSplit> splitList,
- List<String> racks,
+ List<String> locations,
ArrayList<OneBlockInfo> validBlocks) {
// create an input split
Path[] fl = new Path[validBlocks.size()];
long[] offset = new long[validBlocks.size()];
long[] length = new long[validBlocks.size()];
- String[] rackLocations = racks.toArray(new String[racks.size()]);
for (int i = 0; i < validBlocks.size(); i++) {
fl[i] = validBlocks.get(i).onepath;
offset[i] = validBlocks.get(i).offset;
@@ -427,7 +431,7 @@ public abstract class CombineFileInputFo
// add this split to the list that is returned
CombineFileSplit thissplit = new CombineFileSplit(job, fl, offset,
- length, rackLocations);
+ length, locations.toArray(new String[0]));
splitList.add(thissplit);
}
@@ -484,7 +488,9 @@ public abstract class CombineFileInputFo
rackToBlocks.put(rack, blklist);
}
blklist.add(oneblock);
- }
+ // Add this host to rackToNodes map
+ addHostToRack(oneblock.racks[j], oneblock.hosts[j]);
+ }
// add this block to the node --> block map
for (int j = 0; j < oneblock.hosts.length; j++) {
@@ -547,6 +553,23 @@ public abstract class CombineFileInputFo
}
}
+ private static void addHostToRack(String rack, String host) {
+ Set<String> hosts = rackToNodes.get(rack);
+ if (hosts == null) {
+ hosts = new HashSet<String>();
+ rackToNodes.put(rack, hosts);
+ }
+ hosts.add(host);
+ }
+
+ private static List<String> getHosts(List<String> racks) {
+ List<String> hosts = new ArrayList<String>();
+ for (String rack : racks) {
+ hosts.addAll(rackToNodes.get(rack));
+ }
+ return hosts;
+ }
+
/**
* Accept a path only if any one of filters given in the
* constructor do.
Propchange: hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/fs/TestHarFileSystem.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue May 3 15:13:58 2011
@@ -1,4 +1,4 @@
-/hadoop/common/branches/branch-0.20-security-203/src/test/org/apache/hadoop/fs/TestHarFileSystem.java:1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863
+/hadoop/common/branches/branch-0.20-security-203/src/test/org/apache/hadoop/fs/TestHarFileSystem.java:1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088
/hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/TestHarFileSystem.java:979485
/hadoop/common/trunk/src/test/org/apache/hadoop/fs/TestHarFileSystem.java:910709
/hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/fs/TestHarFileSystem.java:713112
Modified: hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/mapred/lib/TestCombineFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/mapred/lib/TestCombineFileInputFormat.java?rev=1099090&r1=1099089&r2=1099090&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/mapred/lib/TestCombineFileInputFormat.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/mapred/lib/TestCombineFileInputFormat.java Tue May 3 15:13:58 2011
@@ -18,11 +18,6 @@
package org.apache.hadoop.mapred.lib;
import java.io.IOException;
-import java.io.DataOutputStream;
-import java.util.BitSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Random;
import junit.framework.TestCase;
@@ -30,17 +25,12 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.fs.BlockLocation;
-import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
@@ -151,14 +141,14 @@ public class TestCombineFileInputFormat
assertEquals(fileSplit.getPath(1).getName(), file2.getName());
assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "/r2");
+ assertEquals(fileSplit.getLocations()[0], hosts2[0]); // should be on r2
fileSplit = (CombineFileSplit) splits[1];
assertEquals(fileSplit.getNumPaths(), 1);
assertEquals(fileSplit.getLocations().length, 1);
assertEquals(fileSplit.getPath(0).getName(), file1.getName());
assertEquals(fileSplit.getOffset(0), 0);
assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "/r1");
+ assertEquals(fileSplit.getLocations()[0], hosts1[0]); // should be on r1
// create another file on 3 datanodes and 3 racks.
dfs.startDataNodes(conf, 1, true, null, rack3, hosts3, null);
@@ -186,7 +176,7 @@ public class TestCombineFileInputFormat
assertEquals(fileSplit.getPath(2).getName(), file3.getName());
assertEquals(fileSplit.getOffset(2), 2 * BLOCKSIZE);
assertEquals(fileSplit.getLength(2), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "/r3");
+ assertEquals(fileSplit.getLocations()[0], hosts3[0]); // should be on r3
fileSplit = (CombineFileSplit) splits[1];
assertEquals(fileSplit.getNumPaths(), 2);
assertEquals(fileSplit.getLocations().length, 1);
@@ -196,14 +186,14 @@ public class TestCombineFileInputFormat
assertEquals(fileSplit.getPath(1).getName(), file2.getName());
assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "/r2");
+ assertEquals(fileSplit.getLocations()[0], hosts2[0]); // should be on r2
fileSplit = (CombineFileSplit) splits[2];
assertEquals(fileSplit.getNumPaths(), 1);
assertEquals(fileSplit.getLocations().length, 1);
assertEquals(fileSplit.getPath(0).getName(), file1.getName());
assertEquals(fileSplit.getOffset(0), 0);
assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "/r1");
+ assertEquals(fileSplit.getLocations()[0], hosts1[0]); // should be on r1
// create file4 on all three racks
Path file4 = new Path(dir4 + "/file4");
@@ -229,7 +219,7 @@ public class TestCombineFileInputFormat
assertEquals(fileSplit.getPath(2).getName(), file3.getName());
assertEquals(fileSplit.getOffset(2), 2 * BLOCKSIZE);
assertEquals(fileSplit.getLength(2), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "/r3");
+ assertEquals(fileSplit.getLocations()[0], hosts3[0]); // should be on r3
fileSplit = (CombineFileSplit) splits[1];
assertEquals(fileSplit.getNumPaths(), 2);
assertEquals(fileSplit.getLocations().length, 1);
@@ -239,14 +229,14 @@ public class TestCombineFileInputFormat
assertEquals(fileSplit.getPath(1).getName(), file2.getName());
assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "/r2");
+ assertEquals(fileSplit.getLocations()[0], hosts2[0]); // should be on r2
fileSplit = (CombineFileSplit) splits[2];
assertEquals(fileSplit.getNumPaths(), 1);
assertEquals(fileSplit.getLocations().length, 1);
assertEquals(fileSplit.getPath(0).getName(), file1.getName());
assertEquals(fileSplit.getOffset(0), 0);
assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "/r1");
+ assertEquals(fileSplit.getLocations()[0], hosts1[0]); // should be on r1
// maximum split size is 2 blocks
inFormat = new DummyInputFormat();
@@ -385,7 +375,7 @@ public class TestCombineFileInputFormat
assertEquals(fileSplit.getPath(0).getName(), file1.getName());
assertEquals(fileSplit.getOffset(0), 0);
assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "/r1");
+ assertEquals(fileSplit.getLocations()[0], hosts1[0]); // should be on r1
// maximum split size is 7 blocks and min is 3 blocks
inFormat = new DummyInputFormat();
@@ -431,15 +421,15 @@ public class TestCombineFileInputFormat
fileSplit = (CombineFileSplit) splits[0];
assertEquals(fileSplit.getNumPaths(), 2);
assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getLocations()[0], "/r2");
+ assertEquals(fileSplit.getLocations()[0], hosts2[0]); // should be on r2
fileSplit = (CombineFileSplit) splits[1];
assertEquals(fileSplit.getNumPaths(), 1);
assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getLocations()[0], "/r1");
+ assertEquals(fileSplit.getLocations()[0], hosts1[0]); // should be on r1
fileSplit = (CombineFileSplit) splits[2];
assertEquals(fileSplit.getNumPaths(), 6);
assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getLocations()[0], "/r3");
+ assertEquals(fileSplit.getLocations()[0], hosts3[0]); // should be on r3
} finally {
if (dfs != null) {
dfs.shutdown();
Propchange: hadoop/common/branches/branch-0.20-security/src/tools/org/apache/hadoop/tools/HadoopArchives.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue May 3 15:13:58 2011
@@ -1,4 +1,4 @@
-/hadoop/common/branches/branch-0.20-security-203/src/tools/org/apache/hadoop/tools/HadoopArchives.java:1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863
+/hadoop/common/branches/branch-0.20-security-203/src/tools/org/apache/hadoop/tools/HadoopArchives.java:1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088
/hadoop/common/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java:910709
/hadoop/core/branches/branch-0.19/src/tools/org/apache/hadoop/tools/HadoopArchives.java:713112
/hadoop/core/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java:727001,727117,727191,727212,727217,727228,727255,727869,728187,729052,729987,732385,732572,732613,732777,732838,732869,733887,734870,734916,736426,738328,738697,740077,740157,741703,741762,743745,743816,743892,744894,745180,746010,746206,746227,746233,746274,746338,746902-746903,746925,746944,746968,746970,747279,747289,747802,748084,748090,748783,749262,749318,749863,750533,752073,752609,752834,752836,752913,752932,753112-753113,753346,754645,754847,754927,755035,755226,755348,755370,755418,755426,755790,755905,755938,755960,755986,755998,756352,757448,757624,757849,758156,758180,759398,759932,760502,760783,761046,761482,761632,762216,762879,763107,763502,764967,765016,765809,765951,771607,771661,772844,772876,772884,772920,773889,776638,778962,778966,779893,781720,784661,785046,785569