Posted to common-commits@hadoop.apache.org by cd...@apache.org on 2008/03/27 23:38:51 UTC
svn commit: r642012 - in /hadoop/core/trunk: CHANGES.txt
src/java/org/apache/hadoop/dfs/FSNamesystem.java
src/java/org/apache/hadoop/dfs/FileDataServlet.java
src/java/org/apache/hadoop/dfs/JspHelper.java
Author: cdouglas
Date: Thu Mar 27 15:38:40 2008
New Revision: 642012
URL: http://svn.apache.org/viewvc?rev=642012&view=rev
Log:
HADOOP-3084. Fix HftpFileSystem to work for zero-length files.
(cdouglas)
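
For context before the diff: for a zero-length file the namenode returns no located blocks, so the old datanode-tallying loop in FileDataServlet.pickSrcDatanode had nothing to choose from and hftp reads of such files failed. The fix falls back to a randomly chosen live datanode in that case. A minimal standalone sketch of the new decision, using plain Strings in place of the real DatanodeID/DatanodeInfo types (the names below are illustrative stand-ins, not Hadoop APIs):

import java.util.List;
import java.util.Random;

public class PickSketch {
  private final Random rand = new Random();

  /**
   * Shape of the fixed selection: if the file is empty or has no located
   * blocks, return any live datanode; otherwise defer to the "best" holder
   * of the first block (JspHelper.bestNode in the real code).
   */
  public String pickSrcDatanode(long fileLen, List<String> firstBlockHolders,
                                List<String> liveDatanodes) {
    if (fileLen == 0 || firstBlockHolders.isEmpty()) {
      return liveDatanodes.get(rand.nextInt(liveDatanodes.size()));
    }
    return firstBlockHolders.get(0); // stand-in for bestNode(blks.get(0))
  }
}
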
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java
hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FileDataServlet.java
hadoop/core/trunk/src/java/org/apache/hadoop/dfs/JspHelper.java
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=642012&r1=642011&r2=642012&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Thu Mar 27 15:38:40 2008
@@ -441,6 +441,9 @@
HADOOP-3070. Protect the trash emptier thread from null pointer
exceptions. (Koji Noguchi via omalley)
+ HADOOP-3084. Fix HftpFileSystem to work for zero-length files.
+ (cdouglas)
+
Release 0.16.1 - 2008-03-13
INCOMPATIBLE CHANGES
Modified: hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java?rev=642012&r1=642011&r2=642012&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java Thu Mar 27 15:38:40 2008
@@ -3577,6 +3577,7 @@
}
/** Stop at and return the datanode at index (used for content browsing)*/
+ @Deprecated
private DatanodeDescriptor getDatanodeByIndex(int index) {
int i = 0;
for (DatanodeDescriptor node : datanodeMap.values()) {
@@ -3588,6 +3589,7 @@
return null;
}
+ @Deprecated
public String randomDataNode() {
int size = datanodeMap.size();
int index = 0;
@@ -3603,6 +3605,10 @@
}
}
return null;
+ }
+
+ public DatanodeDescriptor getRandomDatanode() {
+ return replicator.chooseTarget(1, null, null, 0)[0];
}
public int getNameNodeInfoPort() {
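
The new getRandomDatanode() above delegates to the block-placement chooser (replicator.chooseTarget(1, null, null, 0)[0]) instead of walking datanodeMap by index as the now-@Deprecated helpers do. A rough sketch of that delegation, under an assumed and much simplified chooser interface (not the real replication target chooser's signature):

// Assumed, simplified stand-in for the namesystem's replication target
// chooser; the real chooser takes more arguments (excluded nodes, writer,
// block size) and applies rack-awareness.
interface TargetChooser {
  String[] chooseTarget(int numTargets);
}

public class NamesystemSketch {
  private final TargetChooser replicator;

  public NamesystemSketch(TargetChooser replicator) {
    this.replicator = replicator;
  }

  // Mirrors replicator.chooseTarget(1, null, null, 0)[0] in the commit:
  // let the placement policy hand back a single live datanode.
  public String getRandomDatanode() {
    return replicator.chooseTarget(1)[0];
  }
}
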
Modified: hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FileDataServlet.java?rev=642012&r1=642011&r2=642012&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FileDataServlet.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FileDataServlet.java Thu Mar 27 15:38:40 2008
@@ -38,51 +38,35 @@
private URI createUri(DFSFileInfo i, UnixUserGroupInformation ugi,
ClientProtocol nnproxy, String scheme)
throws IOException, URISyntaxException {
- final DatanodeInfo host = pickSrcDatanode(i, nnproxy);
- return new URI(scheme, null, host.getHostName(),
+ final DatanodeID host = pickSrcDatanode(i, nnproxy);
+ final String hostname;
+ if (host instanceof DatanodeInfo) {
+ hostname = ((DatanodeInfo)host).getHostName();
+ } else {
+ hostname = host.getHost();
+ }
+ return new URI(scheme, null, hostname,
"https".equals(scheme)
? (Integer)getServletContext().getAttribute("datanode.https.port")
: host.getInfoPort(),
"/streamFile", "filename=" + i.getPath() + "&ugi=" + ugi, null);
}
- private final static int BLOCK_SAMPLE = 5;
+ private final static JspHelper jspHelper = new JspHelper();
/** Select a datanode to service this request.
* Currently, this looks at no more than the first five blocks of a file,
* selecting a datanode randomly from the most represented.
*/
- private static DatanodeInfo pickSrcDatanode(DFSFileInfo i,
+ private static DatanodeID pickSrcDatanode(DFSFileInfo i,
ClientProtocol nnproxy) throws IOException {
- long sample;
- if (i.getLen() == 0) sample = 1;
- else sample = i.getLen() / i.getBlockSize() > BLOCK_SAMPLE
- ? i.getBlockSize() * BLOCK_SAMPLE - 1
- : i.getLen();
final LocatedBlocks blks = nnproxy.getBlockLocations(
- i.getPath().toUri().getPath(), 0, sample);
- HashMap<DatanodeInfo, Integer> count = new HashMap<DatanodeInfo, Integer>();
- for (LocatedBlock b : blks.getLocatedBlocks()) {
- for (DatanodeInfo d : b.getLocations()) {
- if (!count.containsKey(d)) {
- count.put(d, 0);
- }
- count.put(d, count.get(d) + 1);
- }
- }
- ArrayList<DatanodeInfo> loc = new ArrayList<DatanodeInfo>();
- int max = 0;
- for (Map.Entry<DatanodeInfo, Integer> e : count.entrySet()) {
- if (e.getValue() > max) {
- loc.clear();
- max = e.getValue();
- }
- if (e.getValue() == max) {
- loc.add(e.getKey());
- }
+ i.getPath().toUri().getPath(), 0, 1);
+ if (i.getLen() == 0 || blks.getLocatedBlocks().size() <= 0) {
+ // pick a random datanode
+ return jspHelper.randomNode();
}
- final Random r = new Random();
- return loc.get(r.nextInt(loc.size()));
+ return jspHelper.bestNode(blks.get(0));
}
/**
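
Note that the javadoc retained above still describes the removed five-block sampling; the replacement requests only the first block and otherwise falls back to a random node. The old path failed for empty files because getBlockLocations returns no blocks for a zero-length file, leaving the tally map empty, so the final random pick was over an empty list. A tiny self-contained reproduction of that last step (plain java.util, nothing Hadoop-specific):

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

public class EmptyPickRepro {
  public static void main(String[] args) {
    // A zero-length file yields no located blocks, so the candidate list is empty.
    List<String> loc = new ArrayList<String>();
    Random r = new Random();
    try {
      // The removed code ended with: return loc.get(r.nextInt(loc.size()));
      // With loc empty, nextInt(0) throws IllegalArgumentException.
      String chosen = loc.get(r.nextInt(loc.size()));
      System.out.println("picked " + chosen);
    } catch (IllegalArgumentException e) {
      System.out.println("zero-length file, no candidates: " + e);
    }
  }
}
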
Modified: hadoop/core/trunk/src/java/org/apache/hadoop/dfs/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/dfs/JspHelper.java?rev=642012&r1=642011&r2=642012&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/dfs/JspHelper.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/dfs/JspHelper.java Thu Mar 27 15:38:40 2008
@@ -65,6 +65,11 @@
UnixUserGroupInformation.saveToConf(conf,
UnixUserGroupInformation.UGI_PROPERTY_NAME, webUGI);
}
+
+ public DatanodeID randomNode() throws IOException {
+ return fsn.getRandomDatanode();
+ }
+
public DatanodeInfo bestNode(LocatedBlock blk) throws IOException {
TreeSet<DatanodeInfo> deadNodes = new TreeSet<DatanodeInfo>();
DatanodeInfo chosenNode = null;