You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2011/09/23 03:40:51 UTC
svn commit: r1174490 - in /hadoop/common/branches/branch-0.20-security-205:
./ src/core/org/apache/hadoop/fs/ src/hdfs/org/apache/hadoop/hdfs/
src/hdfs/org/apache/hadoop/hdfs/protocol/
src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/hdfs/org/apach...
Author: szetszwo
Date: Fri Sep 23 01:40:50 2011
New Revision: 1174490
URL: http://svn.apache.org/viewvc?rev=1174490&view=rev
Log:
svn merge -c 1173470 from branch-0.20-security for HDFS-2340.
Added:
hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/RenewerParam.java
- copied unchanged from r1173470, hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/RenewerParam.java
Modified:
hadoop/common/branches/branch-0.20-security-205/ (props changed)
hadoop/common/branches/branch-0.20-security-205/CHANGES.txt (contents, props changed)
hadoop/common/branches/branch-0.20-security-205/src/core/org/apache/hadoop/fs/HarFileSystem.java (props changed)
hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/DFSUtil.java
hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/JsonUtil.java
hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
hadoop/common/branches/branch-0.20-security-205/src/mapred/ (props changed)
hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/fs/TestHarFileSystem.java (props changed)
hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/security/TestDelegationToken.java
hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
hadoop/common/branches/branch-0.20-security-205/src/tools/org/apache/hadoop/tools/HadoopArchives.java (props changed)
Propchange: hadoop/common/branches/branch-0.20-security-205/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Sep 23 01:40:50 2011
@@ -1,6 +1,6 @@
/hadoop/common/branches/branch-0.20:826138,826568,829987,831184,833001,880632,898713,909245,909723,960946,990003,1044225
/hadoop/common/branches/branch-0.20-append:955380,955398,955448,956329
-/hadoop/common/branches/branch-0.20-security:1170042,1170087,1170997,1171137,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192,1174471
+/hadoop/common/branches/branch-0.20-security:1170042,1170087,1170997,1171137,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192,1173470,1174471
/hadoop/common/branches/branch-0.20-security-203:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088,1099191,1099324,1099330,1099333,1102071,1128115
/hadoop/common/branches/branch-0.20-security-204:1128390,1147228,1148069,1149316,1154413
/hadoop/core/branches/branch-0.19:713112
Modified: hadoop/common/branches/branch-0.20-security-205/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/CHANGES.txt?rev=1174490&r1=1174489&r2=1174490&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.20-security-205/CHANGES.txt Fri Sep 23 01:40:50 2011
@@ -42,6 +42,9 @@ Release 0.20.205.0 - 2011.09.12
HDFS-2318. Provide authentication to webhdfs using SPNEGO and delegation
tokens. (szetszwo)
+ HDFS-2340. Support getFileBlockLocations and getDelegationToken in webhdfs.
+ (szetszwo)
+
BUG FIXES
MAPREDUCE-2324. Removed usage of broken
Propchange: hadoop/common/branches/branch-0.20-security-205/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Sep 23 01:40:50 2011
@@ -1,6 +1,6 @@
/hadoop/common/branches/branch-0.20/CHANGES.txt:826138,826568,829987,831184,833001,880632,898713,909245,909723,960946,990003,1044225
/hadoop/common/branches/branch-0.20-append/CHANGES.txt:955380,955398,955448,956329
-/hadoop/common/branches/branch-0.20-security/CHANGES.txt:1170042,1170087,1170997,1171137,1171181,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192,1173843,1174326,1174471,1174476,1174482
+/hadoop/common/branches/branch-0.20-security/CHANGES.txt:1170042,1170087,1170997,1171137,1171181,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192,1173470,1173843,1174326,1174471,1174476,1174482
/hadoop/common/branches/branch-0.20-security-203/CHANGES.txt:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088,1099191,1099324,1099330,1099333,1102071,1128115
/hadoop/common/branches/branch-0.20-security-204/CHANGES.txt:1128390,1147228,1148069,1149316,1152887,1154413,1159730,1161741
/hadoop/core/branches/branch-0.18/CHANGES.txt:727226
Propchange: hadoop/common/branches/branch-0.20-security-205/src/core/org/apache/hadoop/fs/HarFileSystem.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Sep 23 01:40:50 2011
@@ -1,6 +1,6 @@
/hadoop/common/branches/branch-0.20/src/core/org/apache/hadoop/fs/HarFileSystem.java:990003
/hadoop/common/branches/branch-0.20-append/src/core/org/apache/hadoop/fs/HarFileSystem.java:955380,955398,955448,956329
-/hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/fs/HarFileSystem.java:1170042,1170087,1170997,1171137,1171181,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192
+/hadoop/common/branches/branch-0.20-security/src/core/org/apache/hadoop/fs/HarFileSystem.java:1170042,1170087,1170997,1171137,1171181,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192,1173470
/hadoop/common/branches/branch-0.20-security-203/src/core/org/apache/hadoop/fs/HarFileSystem.java:1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088,1099191,1099324,1099330,1099333,1128115
/hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/fs/HarFileSystem.java:1128390
/hadoop/common/trunk/src/core/org/apache/hadoop/fs/HarFileSystem.java:910709
Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java?rev=1174490&r1=1174489&r2=1174490&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java Fri Sep 23 01:40:50 2011
@@ -458,31 +458,7 @@ public class DFSClient implements FSCons
public BlockLocation[] getBlockLocations(String src, long start,
long length) throws IOException {
LocatedBlocks blocks = callGetBlockLocations(namenode, src, start, length);
- if (blocks == null) {
- return new BlockLocation[0];
- }
- int nrBlocks = blocks.locatedBlockCount();
- BlockLocation[] blkLocations = new BlockLocation[nrBlocks];
- int idx = 0;
- for (LocatedBlock blk : blocks.getLocatedBlocks()) {
- assert idx < nrBlocks : "Incorrect index";
- DatanodeInfo[] locations = blk.getLocations();
- String[] hosts = new String[locations.length];
- String[] names = new String[locations.length];
- String[] racks = new String[locations.length];
- for (int hCnt = 0; hCnt < locations.length; hCnt++) {
- hosts[hCnt] = locations[hCnt].getHostName();
- names[hCnt] = locations[hCnt].getName();
- NodeBase node = new NodeBase(names[hCnt],
- locations[hCnt].getNetworkLocation());
- racks[hCnt] = node.toString();
- }
- blkLocations[idx] = new BlockLocation(names, hosts, racks,
- blk.getStartOffset(),
- blk.getBlockSize());
- idx++;
- }
- return blkLocations;
+ return DFSUtil.locatedBlocks2Locations(blocks);
}
public DFSInputStream open(String src) throws IOException {
Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/DFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/DFSUtil.java?rev=1174490&r1=1174489&r2=1174490&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/DFSUtil.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/DFSUtil.java Fri Sep 23 01:40:50 2011
@@ -19,9 +19,14 @@
package org.apache.hadoop.hdfs;
import java.io.UnsupportedEncodingException;
-
import java.util.StringTokenizer;
+
+import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.net.NodeBase;
public class DFSUtil {
/**
@@ -72,5 +77,38 @@ public class DFSUtil {
}
return null;
}
+
+ /**
+ * Convert a LocatedBlocks to BlockLocations[]
+ * @param blocks a LocatedBlocks
+ * @return an array of BlockLocations
+ */
+ public static BlockLocation[] locatedBlocks2Locations(LocatedBlocks blocks) {
+ if (blocks == null) {
+ return new BlockLocation[0];
+ }
+ int nrBlocks = blocks.locatedBlockCount();
+ BlockLocation[] blkLocations = new BlockLocation[nrBlocks];
+ int idx = 0;
+ for (LocatedBlock blk : blocks.getLocatedBlocks()) {
+ assert idx < nrBlocks : "Incorrect index";
+ DatanodeInfo[] locations = blk.getLocations();
+ String[] hosts = new String[locations.length];
+ String[] names = new String[locations.length];
+ String[] racks = new String[locations.length];
+ for (int hCnt = 0; hCnt < locations.length; hCnt++) {
+ hosts[hCnt] = locations[hCnt].getHostName();
+ names[hCnt] = locations[hCnt].getName();
+ NodeBase node = new NodeBase(names[hCnt],
+ locations[hCnt].getNetworkLocation());
+ racks[hCnt] = node.toString();
+ }
+ blkLocations[idx] = new BlockLocation(names, hosts, racks,
+ blk.getStartOffset(),
+ blk.getBlockSize());
+ idx++;
+ }
+ return blkLocations;
+ }
}
Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java?rev=1174490&r1=1174489&r2=1174490&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java Fri Sep 23 01:40:50 2011
@@ -88,6 +88,25 @@ public class DatanodeInfo extends Datano
this.hostName = hostName;
}
+ /** Constructor */
+ public DatanodeInfo(final String name, final String storageID,
+ final int infoPort, final int ipcPort,
+ final long capacity, final long dfsUsed, final long remaining,
+ final long lastUpdate, final int xceiverCount,
+ final String networkLocation, final String hostName,
+ final AdminStates adminState) {
+ super(name, storageID, infoPort, ipcPort);
+
+ this.capacity = capacity;
+ this.dfsUsed = dfsUsed;
+ this.remaining = remaining;
+ this.lastUpdate = lastUpdate;
+ this.xceiverCount = xceiverCount;
+ this.location = networkLocation;
+ this.hostName = hostName;
+ this.adminState = adminState;
+ }
+
/** The raw capacity. */
public long getCapacity() { return capacity; }
@@ -266,7 +285,7 @@ public class DatanodeInfo extends Datano
/**
* Retrieves the admin state of this node.
*/
- AdminStates getAdminState() {
+ public AdminStates getAdminState() {
if (adminState == null) {
return AdminStates.NORMAL;
}
Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1174490&r1=1174489&r2=1174490&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java Fri Sep 23 01:40:50 2011
@@ -562,8 +562,11 @@ public class NameNode implements ClientP
}
private static String getClientMachine() {
- String clientMachine = Server.getRemoteAddress();
- if (clientMachine == null) {
+ String clientMachine = NamenodeWebHdfsMethods.getRemoteAddress();
+ if (clientMachine == null) { //not a web client
+ clientMachine = Server.getRemoteAddress();
+ }
+ if (clientMachine == null) { //not a RPC client
clientMachine = "";
}
return clientMachine;
Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1174490&r1=1174489&r2=1174490&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Fri Sep 23 01:40:50 2011
@@ -73,6 +73,7 @@ import org.apache.hadoop.hdfs.web.resour
import org.apache.hadoop.hdfs.web.resources.PostOpParam;
import org.apache.hadoop.hdfs.web.resources.PutOpParam;
import org.apache.hadoop.hdfs.web.resources.RecursiveParam;
+import org.apache.hadoop.hdfs.web.resources.RenewerParam;
import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
import org.apache.hadoop.hdfs.web.resources.UriFsPathParam;
import org.apache.hadoop.hdfs.web.resources.UserParam;
@@ -86,7 +87,14 @@ import org.apache.hadoop.security.token.
/** Web-hdfs NameNode implementation. */
@Path("")
public class NamenodeWebHdfsMethods {
- private static final Log LOG = LogFactory.getLog(NamenodeWebHdfsMethods.class);
+ public static final Log LOG = LogFactory.getLog(NamenodeWebHdfsMethods.class);
+
+ private static final ThreadLocal<String> REMOTE_ADDRESS = new ThreadLocal<String>();
+
+ /** @return the remote client address. */
+ public static String getRemoteAddress() {
+ return REMOTE_ADDRESS.get();
+ }
private @Context ServletContext context;
private @Context HttpServletRequest request;
@@ -205,6 +213,8 @@ public class NamenodeWebHdfsMethods {
return ugi.doAs(new PrivilegedExceptionAction<Response>() {
@Override
public Response run() throws IOException, URISyntaxException {
+ REMOTE_ADDRESS.set(request.getRemoteAddr());
+ try {
final String fullpath = path.getAbsolutePath();
final NameNode namenode = (NameNode)context.getAttribute("name.node");
@@ -253,6 +263,10 @@ public class NamenodeWebHdfsMethods {
default:
throw new UnsupportedOperationException(op + " is not supported");
}
+
+ } finally {
+ REMOTE_ADDRESS.set(null);
+ }
}
});
}
@@ -282,6 +296,8 @@ public class NamenodeWebHdfsMethods {
return ugi.doAs(new PrivilegedExceptionAction<Response>() {
@Override
public Response run() throws IOException, URISyntaxException {
+ REMOTE_ADDRESS.set(request.getRemoteAddr());
+ try {
final String fullpath = path.getAbsolutePath();
final NameNode namenode = (NameNode)context.getAttribute("name.node");
@@ -296,6 +312,10 @@ public class NamenodeWebHdfsMethods {
default:
throw new UnsupportedOperationException(op + " is not supported");
}
+
+ } finally {
+ REMOTE_ADDRESS.set(null);
+ }
}
});
}
@@ -316,10 +336,12 @@ public class NamenodeWebHdfsMethods {
final OffsetParam offset,
@QueryParam(LengthParam.NAME) @DefaultValue(LengthParam.DEFAULT)
final LengthParam length,
+ @QueryParam(RenewerParam.NAME) @DefaultValue(RenewerParam.DEFAULT)
+ final RenewerParam renewer,
@QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
final BufferSizeParam bufferSize
) throws IOException, URISyntaxException, InterruptedException {
- return get(ugi, delegation, ROOT, op, offset, length, bufferSize);
+ return get(ugi, delegation, ROOT, op, offset, length, renewer, bufferSize);
}
/** Handle HTTP GET request. */
@@ -337,19 +359,23 @@ public class NamenodeWebHdfsMethods {
final OffsetParam offset,
@QueryParam(LengthParam.NAME) @DefaultValue(LengthParam.DEFAULT)
final LengthParam length,
+ @QueryParam(RenewerParam.NAME) @DefaultValue(RenewerParam.DEFAULT)
+ final RenewerParam renewer,
@QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
final BufferSizeParam bufferSize
) throws IOException, URISyntaxException, InterruptedException {
if (LOG.isTraceEnabled()) {
LOG.trace(op + ": " + path + ", ugi=" + ugi
- + Param.toSortedString(", ", offset, length, bufferSize));
+ + Param.toSortedString(", ", offset, length, renewer, bufferSize));
}
return ugi.doAs(new PrivilegedExceptionAction<Response>() {
@Override
public Response run() throws IOException, URISyntaxException {
+ REMOTE_ADDRESS.set(request.getRemoteAddr());
+ try {
final NameNode namenode = (NameNode)context.getAttribute("name.node");
final String fullpath = path.getAbsolutePath();
@@ -361,6 +387,15 @@ public class NamenodeWebHdfsMethods {
op.getValue(), offset.getValue(), offset, length, bufferSize);
return Response.temporaryRedirect(uri).build();
}
+ case GETFILEBLOCKLOCATIONS:
+ {
+ final long offsetValue = offset.getValue();
+ final Long lengthValue = length.getValue();
+ final LocatedBlocks locatedblocks = namenode.getBlockLocations(fullpath,
+ offsetValue, lengthValue != null? lengthValue: offsetValue + 1);
+ final String js = JsonUtil.toJsonString(locatedblocks);
+ return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+ }
case GETFILESTATUS:
{
final HdfsFileStatus status = namenode.getFileInfo(fullpath);
@@ -372,9 +407,20 @@ public class NamenodeWebHdfsMethods {
final StreamingOutput streaming = getListingStream(namenode, fullpath);
return Response.ok(streaming).type(MediaType.APPLICATION_JSON).build();
}
+ case GETDELEGATIONTOKEN:
+ {
+ final Token<? extends TokenIdentifier> token = generateDelegationToken(
+ namenode, ugi, renewer.getValue());
+ final String js = JsonUtil.toJsonString(token);
+ return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+ }
default:
throw new UnsupportedOperationException(op + " is not supported");
}
+
+ } finally {
+ REMOTE_ADDRESS.set(null);
+ }
}
});
}
@@ -442,6 +488,9 @@ public class NamenodeWebHdfsMethods {
return ugi.doAs(new PrivilegedExceptionAction<Response>() {
@Override
public Response run() throws IOException {
+ REMOTE_ADDRESS.set(request.getRemoteAddr());
+ try {
+
final NameNode namenode = (NameNode)context.getAttribute("name.node");
final String fullpath = path.getAbsolutePath();
@@ -455,6 +504,10 @@ public class NamenodeWebHdfsMethods {
default:
throw new UnsupportedOperationException(op + " is not supported");
}
+
+ } finally {
+ REMOTE_ADDRESS.set(null);
+ }
}
});
}
Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/JsonUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/JsonUtil.java?rev=1174490&r1=1174489&r2=1174490&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/JsonUtil.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/JsonUtil.java Fri Sep 23 01:40:50 2011
@@ -17,19 +17,34 @@
*/
package org.apache.hadoop.hdfs.web;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
import org.mortbay.util.ajax.JSON;
/** JSON Utilities */
public class JsonUtil {
- private static final ThreadLocal<Map<String, Object>> jsonMap
- = new ThreadLocal<Map<String, Object>>() {
+ private static class ThreadLocalMap extends ThreadLocal<Map<String, Object>> {
@Override
protected Map<String, Object> initialValue() {
return new TreeMap<String, Object>();
@@ -41,7 +56,54 @@ public class JsonUtil {
m.clear();
return m;
}
- };
+ }
+
+ private static final ThreadLocalMap jsonMap = new ThreadLocalMap();
+ private static final ThreadLocalMap tokenMap = new ThreadLocalMap();
+ private static final ThreadLocalMap datanodeInfoMap = new ThreadLocalMap();
+ private static final ThreadLocalMap BlockMap = new ThreadLocalMap();
+ private static final ThreadLocalMap locatedBlockMap = new ThreadLocalMap();
+
+ private static final DatanodeInfo[] EMPTY_DATANODE_INFO_ARRAY = {};
+
+ /** Convert a token object to a Json string. */
+ public static String toJsonString(final Token<? extends TokenIdentifier> token
+ ) throws IOException {
+ if (token == null) {
+ return null;
+ }
+
+ final Map<String, Object> m = tokenMap.get();
+ m.put("urlString", token.encodeToUrlString());
+ return JSON.toString(m);
+ }
+
+ /** Convert a Json map to a Token. */
+ public static Token<? extends TokenIdentifier> toToken(
+ final Map<?, ?> m) throws IOException {
+ if (m == null) {
+ return null;
+ }
+
+ final Token<DelegationTokenIdentifier> token
+ = new Token<DelegationTokenIdentifier>();
+ token.decodeFromUrlString((String)m.get("urlString"));
+ return token;
+ }
+
+ /** Convert a Json map to a Token of DelegationTokenIdentifier. */
+ @SuppressWarnings("unchecked")
+ public static Token<DelegationTokenIdentifier> toDelegationToken(
+ final Map<?, ?> m) throws IOException {
+ return (Token<DelegationTokenIdentifier>)toToken(m);
+ }
+
+ /** Convert a Json map to a Token of BlockTokenIdentifier. */
+ @SuppressWarnings("unchecked")
+ public static Token<BlockTokenIdentifier> toBlockToken(
+ final Map<?, ?> m) throws IOException {
+ return (Token<BlockTokenIdentifier>)toToken(m);
+ }
/** Convert an exception object to a Json string. */
public static String toJsonString(final Exception e) {
@@ -77,11 +139,10 @@ public class JsonUtil {
/** Convert a HdfsFileStatus object to a Json string. */
public static String toJsonString(final HdfsFileStatus status) {
- final Map<String, Object> m = jsonMap.get();
if (status == null) {
- m.put("isNull", true);
+ return null;
} else {
- m.put("isNull", false);
+ final Map<String, Object> m = jsonMap.get();
m.put("localName", status.getLocalName());
m.put("isDir", status.isDir());
m.put("len", status.getLen());
@@ -92,8 +153,8 @@ public class JsonUtil {
m.put("modificationTime", status.getModificationTime());
m.put("blockSize", status.getBlockSize());
m.put("replication", status.getReplication());
+ return JSON.toString(m);
}
- return JSON.toString(m);
}
@SuppressWarnings("unchecked")
@@ -101,9 +162,9 @@ public class JsonUtil {
return (Map<String, Object>) JSON.parse(jsonString);
}
- /** Convert a Json string to a HdfsFileStatus object. */
+ /** Convert a Json map to a HdfsFileStatus object. */
public static HdfsFileStatus toFileStatus(final Map<String, Object> m) {
- if ((Boolean)m.get("isNull")) {
+ if (m == null) {
return null;
}
@@ -120,4 +181,204 @@ public class JsonUtil {
return new HdfsFileStatus(len, isDir, replication, blockSize, mTime, aTime,
permission, owner, group, DFSUtil.string2Bytes(localName));
}
+
+  /** Convert a Block to a Json string. */
+ public static String toJsonString(final Block Block) {
+ if (Block == null) {
+ return null;
+ }
+
+ final Map<String, Object> m = BlockMap.get();
+ m.put("blockId", Block.getBlockId());
+ m.put("numBytes", Block.getNumBytes());
+ m.put("generationStamp", Block.getGenerationStamp());
+ return JSON.toString(m);
+ }
+
+  /** Convert a Json map to a Block object. */
+ public static Block toBlock(final Map<?, ?> m) {
+ if (m == null) {
+ return null;
+ }
+
+ final long blockId = (Long)m.get("blockId");
+ final long numBytes = (Long)m.get("numBytes");
+ final long generationStamp = (Long)m.get("generationStamp");
+ return new Block(blockId, numBytes, generationStamp);
+ }
+
+ /** Convert a DatanodeInfo to a Json string. */
+ public static String toJsonString(final DatanodeInfo datanodeinfo) {
+ if (datanodeinfo == null) {
+ return null;
+ }
+
+ final Map<String, Object> m = datanodeInfoMap.get();
+ m.put("name", datanodeinfo.getName());
+ m.put("storageID", datanodeinfo.getStorageID());
+ m.put("infoPort", datanodeinfo.getInfoPort());
+
+ m.put("ipcPort", datanodeinfo.getIpcPort());
+
+ m.put("capacity", datanodeinfo.getCapacity());
+ m.put("dfsUsed", datanodeinfo.getDfsUsed());
+ m.put("remaining", datanodeinfo.getRemaining());
+ m.put("lastUpdate", datanodeinfo.getLastUpdate());
+ m.put("xceiverCount", datanodeinfo.getXceiverCount());
+ m.put("networkLocation", datanodeinfo.getNetworkLocation());
+ m.put("hostName", datanodeinfo.getHostName());
+ m.put("adminState", datanodeinfo.getAdminState().name());
+ return JSON.toString(m);
+ }
+
+  /** Convert a Json map to a DatanodeInfo object. */
+ public static DatanodeInfo toDatanodeInfo(final Map<?, ?> m) {
+ if (m == null) {
+ return null;
+ }
+
+ return new DatanodeInfo(
+ (String)m.get("name"),
+ (String)m.get("storageID"),
+ (int)(long)(Long)m.get("infoPort"),
+ (int)(long)(Long)m.get("ipcPort"),
+
+ (Long)m.get("capacity"),
+ (Long)m.get("dfsUsed"),
+ (Long)m.get("remaining"),
+ (Long)m.get("lastUpdate"),
+ (int)(long)(Long)m.get("xceiverCount"),
+ (String)m.get("networkLocation"),
+ (String)m.get("hostName"),
+ AdminStates.valueOf((String)m.get("adminState")));
+ }
+
+ /** Convert a DatanodeInfo[] to a Json string. */
+ public static String toJsonString(final DatanodeInfo[] array
+ ) throws IOException {
+ if (array == null) {
+ return null;
+ } else if (array.length == 0) {
+ return "[]";
+ } else {
+ final StringBuilder b = new StringBuilder().append('[').append(
+ toJsonString(array[0]));
+ for(int i = 1; i < array.length; i++) {
+ b.append(", ").append(toJsonString(array[i]));
+ }
+ return b.append(']').toString();
+ }
+ }
+
+ /** Convert an Object[] to a DatanodeInfo[]. */
+ public static DatanodeInfo[] toDatanodeInfoArray(final Object[] objects) {
+ if (objects == null) {
+ return null;
+ } else if (objects.length == 0) {
+ return EMPTY_DATANODE_INFO_ARRAY;
+ } else {
+ final DatanodeInfo[] array = new DatanodeInfo[objects.length];
+ for(int i = 0; i < array.length; i++) {
+ array[i] = (DatanodeInfo)toDatanodeInfo((Map<?, ?>) objects[i]);
+ }
+ return array;
+ }
+ }
+
+ /** Convert a LocatedBlock to a Json string. */
+ public static String toJsonString(final LocatedBlock locatedblock
+ ) throws IOException {
+ if (locatedblock == null) {
+ return null;
+ }
+
+ final Map<String, Object> m = locatedBlockMap.get();
+ m.put("blockToken", toJsonString(locatedblock.getBlockToken()));
+ m.put("isCorrupt", locatedblock.isCorrupt());
+ m.put("startOffset", locatedblock.getStartOffset());
+ m.put("block", toJsonString(locatedblock.getBlock()));
+
+ m.put("locations", toJsonString(locatedblock.getLocations()));
+ return JSON.toString(m);
+ }
+
+ /** Convert a Json map to LocatedBlock. */
+ public static LocatedBlock toLocatedBlock(final Map<?, ?> m) throws IOException {
+ if (m == null) {
+ return null;
+ }
+
+ final Block b = toBlock((Map<?, ?>)JSON.parse((String)m.get("block")));
+ final DatanodeInfo[] locations = toDatanodeInfoArray(
+ (Object[])JSON.parse((String)m.get("locations")));
+ final long startOffset = (Long)m.get("startOffset");
+ final boolean isCorrupt = (Boolean)m.get("isCorrupt");
+
+ final LocatedBlock locatedblock = new LocatedBlock(b, locations, startOffset, isCorrupt);
+ locatedblock.setBlockToken(toBlockToken((Map<?, ?>)JSON.parse((String)m.get("blockToken"))));
+ return locatedblock;
+ }
+
+  /** Convert a List of LocatedBlock to a Json string. */
+ public static String toJsonString(final List<LocatedBlock> array
+ ) throws IOException {
+ if (array == null) {
+ return null;
+ } else if (array.size() == 0) {
+ return "[]";
+ } else {
+ final StringBuilder b = new StringBuilder().append('[').append(
+ toJsonString(array.get(0)));
+ for(int i = 1; i < array.size(); i++) {
+ b.append(",\n ").append(toJsonString(array.get(i)));
+ }
+ return b.append(']').toString();
+ }
+ }
+
+ /** Convert an Object[] to a List of LocatedBlock.
+ * @throws IOException */
+ public static List<LocatedBlock> toLocatedBlockList(final Object[] objects
+ ) throws IOException {
+ if (objects == null) {
+ return null;
+ } else if (objects.length == 0) {
+ return Collections.emptyList();
+ } else {
+ final List<LocatedBlock> list = new ArrayList<LocatedBlock>(objects.length);
+ for(int i = 0; i < objects.length; i++) {
+ list.add((LocatedBlock)toLocatedBlock((Map<?, ?>)objects[i]));
+ }
+ return list;
+ }
+ }
+
+ /** Convert LocatedBlocks to a Json string. */
+ public static String toJsonString(final LocatedBlocks locatedblocks
+ ) throws IOException {
+ if (locatedblocks == null) {
+ return null;
+ }
+
+ final Map<String, Object> m = jsonMap.get();
+ m.put("fileLength", locatedblocks.getFileLength());
+ m.put("isUnderConstruction", locatedblocks.isUnderConstruction());
+
+ m.put("locatedBlocks", toJsonString(locatedblocks.getLocatedBlocks()));
+ return JSON.toString(m);
+ }
+
+  /** Convert a Json map to LocatedBlocks. */
+ public static LocatedBlocks toLocatedBlocks(final Map<String, Object> m
+ ) throws IOException {
+ if (m == null) {
+ return null;
+ }
+
+ final long fileLength = (Long)m.get("fileLength");
+ final boolean isUnderConstruction = (Boolean)m.get("isUnderConstruction");
+ final List<LocatedBlock> locatedBlocks = toLocatedBlockList(
+ (Object[])JSON.parse((String) m.get("locatedBlocks")));
+ return new LocatedBlocks(fileLength, locatedBlocks, isUnderConstruction);
+ }
}
\ No newline at end of file
Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1174490&r1=1174489&r2=1174490&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Fri Sep 23 01:40:50 2011
@@ -27,9 +27,12 @@ import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
+import java.util.Arrays;
+import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -41,6 +44,7 @@ import org.apache.hadoop.hdfs.HftpFileSy
import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.server.namenode.SafeModeException;
import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
@@ -50,7 +54,9 @@ import org.apache.hadoop.hdfs.web.resour
import org.apache.hadoop.hdfs.web.resources.GetOpParam;
import org.apache.hadoop.hdfs.web.resources.GroupParam;
import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
+import org.apache.hadoop.hdfs.web.resources.LengthParam;
import org.apache.hadoop.hdfs.web.resources.ModificationTimeParam;
+import org.apache.hadoop.hdfs.web.resources.OffsetParam;
import org.apache.hadoop.hdfs.web.resources.OverwriteParam;
import org.apache.hadoop.hdfs.web.resources.OwnerParam;
import org.apache.hadoop.hdfs.web.resources.Param;
@@ -58,13 +64,16 @@ import org.apache.hadoop.hdfs.web.resour
import org.apache.hadoop.hdfs.web.resources.PostOpParam;
import org.apache.hadoop.hdfs.web.resources.PutOpParam;
import org.apache.hadoop.hdfs.web.resources.RecursiveParam;
+import org.apache.hadoop.hdfs.web.resources.RenewerParam;
import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
import org.apache.hadoop.hdfs.web.resources.UserParam;
+import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Progressable;
import org.mortbay.util.ajax.JSON;
@@ -171,7 +180,7 @@ public class WebHdfsFileSystem extends H
final Param<?,?>... parameters) throws IOException {
//initialize URI path and query
final String path = "/" + PATH_PREFIX
- + makeQualified(fspath).toUri().getPath();
+ + (fspath == null? "/": makeQualified(fspath).toUri().getPath());
final String query = op.toQueryString()
+ '&' + new UserParam(ugi)
+ Param.toSortedString("&", parameters);
@@ -390,4 +399,29 @@ public class WebHdfsFileSystem extends H
}
return statuses;
}
+
+ @Override
+ public Token<DelegationTokenIdentifier> getDelegationToken(final String renewer
+ ) throws IOException {
+ final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN;
+ final Map<String, Object> m = run(op, null, new RenewerParam(renewer));
+ final Token<DelegationTokenIdentifier> token = JsonUtil.toDelegationToken(m);
+ token.setService(new Text(getCanonicalServiceName()));
+ return token;
+ }
+
+ @Override
+ public BlockLocation[] getFileBlockLocations(final FileStatus status,
+ final long offset, final long length) throws IOException {
+ if (status == null) {
+ return null;
+ }
+ statistics.incrementReadOps(1);
+
+ final Path p = status.getPath();
+ final HttpOpParam.Op op = GetOpParam.Op.GETFILEBLOCKLOCATIONS;
+ final Map<String, Object> m = run(op, p, new OffsetParam(offset),
+ new LengthParam(length));
+ return DFSUtil.locatedBlocks2Locations(JsonUtil.toLocatedBlocks(m));
+ }
}
\ No newline at end of file
Modified: hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java?rev=1174490&r1=1174489&r2=1174490&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java Fri Sep 23 01:40:50 2011
@@ -27,10 +27,13 @@ public class GetOpParam extends HttpOpPa
/** Get operations. */
public static enum Op implements HttpOpParam.Op {
OPEN(HttpURLConnection.HTTP_OK),
+ GETFILEBLOCKLOCATIONS(HttpURLConnection.HTTP_OK),
GETFILESTATUS(HttpURLConnection.HTTP_OK),
LISTSTATUS(HttpURLConnection.HTTP_OK),
+ GETDELEGATIONTOKEN(HttpURLConnection.HTTP_OK),
+
NULL(HttpURLConnection.HTTP_NOT_IMPLEMENTED);
final int expectedHttpResponseCode;
Propchange: hadoop/common/branches/branch-0.20-security-205/src/mapred/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Sep 23 01:40:50 2011
@@ -1,6 +1,6 @@
/hadoop/common/branches/branch-0.20/src/mapred:826138,826568,829987,831184,833001,880632,898713,909245,909723,960946,990003,1044225
/hadoop/common/branches/branch-0.20-append/src/mapred:955380,955398,955448,956329
-/hadoop/common/branches/branch-0.20-security/src/mapred:1170042,1170087,1170997,1171137,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192
+/hadoop/common/branches/branch-0.20-security/src/mapred:1170042,1170087,1170997,1171137,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192,1173470
/hadoop/common/branches/branch-0.20-security-203/src/mapred:1096071,1097011,1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088,1099191,1099324,1099330,1099333,1128115
/hadoop/common/branches/branch-0.20-security-204/src/mapred:1128390
/hadoop/core/branches/branch-0.19/src/mapred:713112
Propchange: hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/fs/TestHarFileSystem.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Sep 23 01:40:50 2011
@@ -1,6 +1,6 @@
/hadoop/common/branches/branch-0.20/src/test/org/apache/hadoop/fs/TestHarFileSystem.java:990003
/hadoop/common/branches/branch-0.20-append/src/test/org/apache/hadoop/fs/TestHarFileSystem.java:955380,955398,955448,956329
-/hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/fs/TestHarFileSystem.java:1170042,1170087,1170997,1171137,1171181,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192
+/hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/fs/TestHarFileSystem.java:1170042,1170087,1170997,1171137,1171181,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192,1173470
/hadoop/common/branches/branch-0.20-security-203/src/test/org/apache/hadoop/fs/TestHarFileSystem.java:1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088,1099191,1099324,1099330,1099333,1128115
/hadoop/common/branches/branch-0.20-security-204/src/test/org/apache/hadoop/fs/TestHarFileSystem.java:1128390
/hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/TestHarFileSystem.java:979485
Modified: hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/security/TestDelegationToken.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/security/TestDelegationToken.java?rev=1174490&r1=1174489&r2=1174490&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/security/TestDelegationToken.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/security/TestDelegationToken.java Fri Sep 23 01:40:50 2011
@@ -23,25 +23,29 @@ package org.apache.hadoop.hdfs.security;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
+import java.net.URI;
import java.security.PrivilegedExceptionAction;
-import junit.framework.Assert;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
+import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
-import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
+import org.apache.hadoop.security.token.Token;
+import org.apache.log4j.Level;
import org.junit.After;
+import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@@ -53,6 +57,7 @@ public class TestDelegationToken {
@Before
public void setUp() throws Exception {
config = new Configuration();
+ config.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
config.setLong(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_MAX_LIFETIME_KEY, 10000);
config.setLong(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_RENEW_INTERVAL_KEY, 5000);
config.set("hadoop.security.auth_to_local",
@@ -60,7 +65,7 @@ public class TestDelegationToken {
"DEFAULT");
FileSystem.setDefaultUri(config, "hdfs://localhost:" + "0");
- cluster = new MiniDFSCluster(0, config, 1, true, true, true, null, null, null, null);
+ cluster = new MiniDFSCluster(0, config, 0, true, true, true, null, null, null, null);
cluster.waitActive();
cluster.getNameNode().getNamesystem().getDelegationTokenSecretManager()
.startThreads();
@@ -160,6 +165,33 @@ public class TestDelegationToken {
}
@Test
+ public void testDelegationTokenWebHdfsApi() throws Exception {
+ ((Log4JLogger)NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL);
+ final DelegationTokenSecretManager dtSecretManager = cluster.getNameNode(
+ ).getNamesystem().getDelegationTokenSecretManager();
+ final String uri = WebHdfsFileSystem.SCHEME + "://"
+ + config.get("dfs.http.address");
+ //get file system as JobTracker
+ final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
+ "JobTracker", new String[]{"user"});
+ final WebHdfsFileSystem webhdfs = ugi.doAs(
+ new PrivilegedExceptionAction<WebHdfsFileSystem>() {
+ @Override
+ public WebHdfsFileSystem run() throws Exception {
+ return (WebHdfsFileSystem)FileSystem.get(new URI(uri), config);
+ }
+ });
+
+ final Token<DelegationTokenIdentifier> token = webhdfs.getDelegationToken("JobTracker");
+ DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
+ byte[] tokenId = token.getIdentifier();
+ identifier.readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
+ LOG.info("A valid token should have non-null password, and should be renewed successfully");
+ Assert.assertTrue(null != dtSecretManager.retrievePassword(identifier));
+ dtSecretManager.renewToken(token, "JobTracker");
+ }
+
+ @Test
public void testDelegationTokenWithDoAs() throws Exception {
final DistributedFileSystem dfs = (DistributedFileSystem) cluster.getFileSystem();
final Token<DelegationTokenIdentifier> token = dfs.getDelegationToken(new Text(
Modified: hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java?rev=1174490&r1=1174489&r2=1174490&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java (original)
+++ hadoop/common/branches/branch-0.20-security-205/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java Fri Sep 23 01:40:50 2011
@@ -24,6 +24,8 @@ import java.net.URI;
import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileSystemContractBaseTest;
import org.apache.hadoop.fs.Path;
@@ -132,4 +134,19 @@ public class TestWebHdfsFileSystemContra
// expected
}
}
+
+ public void testGetFileBlockLocations() throws IOException {
+ final String f = "/test/testGetFileBlockLocations";
+ final Path p = path(f);
+ createFile(p);
+ final BlockLocation[] computed = fs.getFileBlockLocations(
+ fs.getFileStatus(p), 0L, 1L);
+ final FileSystem hdfs = cluster.getFileSystem();
+ final BlockLocation[] expected = hdfs.getFileBlockLocations(
+ hdfs.getFileStatus(new Path(f)), 0L, 1L);
+ assertEquals(expected.length, computed.length);
+ for(int i = 0; i < computed.length; i++) {
+ assertEquals(expected[i].toString(), computed[i].toString());
+ }
+ }
}
Propchange: hadoop/common/branches/branch-0.20-security-205/src/tools/org/apache/hadoop/tools/HadoopArchives.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Sep 23 01:40:50 2011
@@ -1,6 +1,6 @@
/hadoop/common/branches/branch-0.20/src/tools/org/apache/hadoop/tools/HadoopArchives.java:990003
/hadoop/common/branches/branch-0.20-append/src/tools/org/apache/hadoop/tools/HadoopArchives.java:955380,955398,955448,956329
-/hadoop/common/branches/branch-0.20-security/src/tools/org/apache/hadoop/tools/HadoopArchives.java:1170042,1170087,1170997,1171137,1171181,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192
+/hadoop/common/branches/branch-0.20-security/src/tools/org/apache/hadoop/tools/HadoopArchives.java:1170042,1170087,1170997,1171137,1171181,1171380,1171613,1171891,1171905,1172184,1172188,1172190,1172192,1173470
/hadoop/common/branches/branch-0.20-security-203/src/tools/org/apache/hadoop/tools/HadoopArchives.java:1097249,1097269,1097281,1097966,1098816,1098819,1098823,1098827,1098832,1098839,1098854,1098863,1099088,1099191,1099324,1099330,1099333,1128115
/hadoop/common/branches/branch-0.20-security-204/src/tools/org/apache/hadoop/tools/HadoopArchives.java:1128390
/hadoop/common/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java:910709