You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by wh...@apache.org on 2014/10/01 22:22:34 UTC
[1/2] git commit: HDFS-7158. Reduce the memory usage of
WebImageViewer. Contributed by Haohui Mai.
Repository: hadoop
Updated Branches:
refs/heads/branch-2.6 bd69f8666 -> 5d2f32548
HDFS-7158. Reduce the memory usage of WebImageViewer. Contributed by Haohui Mai.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0eec4c18
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0eec4c18
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0eec4c18
Branch: refs/heads/branch-2.6
Commit: 0eec4c187c6b41ac9ba194e1b30c3b40d4e6a212
Parents: bd69f86
Author: Haohui Mai <wh...@apache.org>
Authored: Wed Oct 1 10:53:38 2014 -0700
Committer: Haohui Mai <wh...@apache.org>
Committed: Wed Oct 1 13:22:07 2014 -0700
----------------------------------------------------------------------
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 2 +
.../offlineImageViewer/FSImageHandler.java | 126 +++++-----
.../tools/offlineImageViewer/FSImageLoader.java | 236 ++++++++++++-------
3 files changed, 212 insertions(+), 152 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/0eec4c18/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 49598e1..74e2e95 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -273,6 +273,8 @@ Release 2.6.0 - UNRELEASED
HDFS-7153. Add storagePolicy to NN edit log during file creation.
(Arpit Agarwal)
+ HDFS-7158. Reduce the memory usage of WebImageViewer. (wheat9)
+
OPTIMIZATIONS
HDFS-6690. Deduplicate xattr names in memory. (wang)
http://git-wip-us.apache.org/repos/asf/hadoop/blob/0eec4c18/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
index b6d760a..dea6422 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
@@ -24,6 +24,9 @@ import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.ipc.RemoteException;
+import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.MessageEvent;
@@ -37,99 +40,92 @@ import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import org.jboss.netty.handler.codec.http.HttpVersion;
import org.jboss.netty.handler.codec.http.QueryStringDecoder;
+import javax.management.Query;
+
/**
* Implement the read-only WebHDFS API for fsimage.
*/
-public class FSImageHandler extends SimpleChannelUpstreamHandler {
+class FSImageHandler extends SimpleChannelUpstreamHandler {
public static final Log LOG = LogFactory.getLog(FSImageHandler.class);
- private final FSImageLoader loader;
+ private final FSImageLoader image;
- public FSImageHandler(FSImageLoader loader) throws IOException {
- this.loader = loader;
+ FSImageHandler(FSImageLoader image) throws IOException {
+ this.image = image;
}
@Override
public void messageReceived(
ChannelHandlerContext ctx, MessageEvent e) throws Exception {
- String op = getOp(e);
+ ChannelFuture future = e.getFuture();
try {
- String path = getPath(e);
- handleOperation(op, path, e);
- } catch (Exception ex) {
- notFoundResponse(e);
- LOG.warn(ex.getMessage());
+ future = handleOperation(e);
} finally {
- e.getFuture().addListener(ChannelFutureListener.CLOSE);
- }
- }
-
- /** return the op parameter in upper case */
- private String getOp(MessageEvent e) {
- Map<String, List<String>> parameters = getDecoder(e).getParameters();
- if (parameters.containsKey("op")) {
- return parameters.get("op").get(0).toUpperCase();
- } else {
- // return "" to avoid NPE
- return "";
- }
- }
-
- private String getPath(MessageEvent e) throws FileNotFoundException {
- String path = getDecoder(e).getPath();
- // trim "/webhdfs/v1" to keep compatibility with WebHDFS API
- if (path.startsWith("/webhdfs/v1/")) {
- return path.replaceFirst("/webhdfs/v1", "");
- } else {
- throw new FileNotFoundException("Path: " + path + " should " +
- "start with \"/webhdfs/v1/\"");
+ future.addListener(ChannelFutureListener.CLOSE);
}
}
- private QueryStringDecoder getDecoder(MessageEvent e) {
- HttpRequest request = (HttpRequest) e.getMessage();
- return new QueryStringDecoder(request.getUri());
- }
-
- private void handleOperation(String op, String path, MessageEvent e)
+ private ChannelFuture handleOperation(MessageEvent e)
throws IOException {
HttpRequest request = (HttpRequest) e.getMessage();
HttpResponse response = new DefaultHttpResponse(
- HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
- response.setHeader(HttpHeaders.Names.CONTENT_TYPE,
- "application/json");
- String content = null;
+ HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
+ response.setHeader(HttpHeaders.Names.CONTENT_TYPE, "application/json");
- if (request.getMethod() == HttpMethod.GET){
- if (op.equals("GETFILESTATUS")) {
- content = loader.getFileStatus(path);
- } else if (op.equals("LISTSTATUS")) {
- content = loader.listStatus(path);
- } else if (op.equals("GETACLSTATUS")) {
- content = loader.getAclStatus(path);
- } else {
- response.setStatus(HttpResponseStatus.BAD_REQUEST);
- }
- } else {
- // only HTTP GET is allowed since fsimage is read-only.
+ if (request.getMethod() != HttpMethod.GET) {
response.setStatus(HttpResponseStatus.METHOD_NOT_ALLOWED);
+ return e.getChannel().write(response);
}
- if (content != null) {
- HttpHeaders.setContentLength(response, content.length());
- }
- e.getChannel().write(response);
+ QueryStringDecoder decoder = new QueryStringDecoder(request.getUri());
+ final String op = getOp(decoder);
- if (content != null) {
- e.getChannel().write(content);
+ String content;
+ String path = null;
+ try {
+ path = getPath(decoder);
+ if ("GETFILESTATUS".equals(op)) {
+ content = image.getFileStatus(path);
+ } else if ("LISTSTATUS".equals(op)) {
+ content = image.listStatus(path);
+ } else if ("GETACLSTATUS".equals(op)) {
+ content = image.getAclStatus(path);
+ } else {
+ throw new IllegalArgumentException("Invalid value for webhdfs parameter" + " \"op\"");
+ }
+ } catch (IllegalArgumentException ex) {
+ response.setStatus(HttpResponseStatus.BAD_REQUEST);
+ content = JsonUtil.toJsonString(ex);
+ } catch (FileNotFoundException ex) {
+ response.setStatus(HttpResponseStatus.NOT_FOUND);
+ content = JsonUtil.toJsonString(ex);
+ } catch (Exception ex) {
+ content = JsonUtil.toJsonString(ex);
}
+ HttpHeaders.setContentLength(response, content.length());
+ e.getChannel().write(response);
+ ChannelFuture future = e.getChannel().write(content);
+
LOG.info(response.getStatus().getCode() + " method="
+ request.getMethod().getName() + " op=" + op + " target=" + path);
+
+ return future;
}
- private void notFoundResponse(MessageEvent e) {
- HttpResponse response = new DefaultHttpResponse(
- HttpVersion.HTTP_1_1, HttpResponseStatus.NOT_FOUND);
- e.getChannel().write(response);
+ private static String getOp(QueryStringDecoder decoder) {
+ Map<String, List<String>> parameters = decoder.getParameters();
+ return parameters.containsKey("op")
+ ? parameters.get("op").get(0).toUpperCase() : null;
+ }
+
+ private static String getPath(QueryStringDecoder decoder)
+ throws FileNotFoundException {
+ String path = decoder.getPath();
+ if (path.startsWith("/webhdfs/v1/")) {
+ return path.substring(11);
+ } else {
+ throw new FileNotFoundException("Path: " + path + " should " +
+ "start with \"/webhdfs/v1/\"");
+ }
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/0eec4c18/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
index bab83a1..b68d842 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
@@ -18,16 +18,22 @@
package org.apache.hadoop.hdfs.tools.offlineImageViewer;
import java.io.BufferedInputStream;
+import java.io.EOFException;
import java.io.FileInputStream;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
+import com.google.common.collect.ImmutableList;
+import com.google.protobuf.CodedInputStream;
+import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -55,14 +61,38 @@ import com.google.common.io.LimitInputStream;
class FSImageLoader {
public static final Log LOG = LogFactory.getLog(FSImageHandler.class);
- private static String[] stringTable;
- private static Map<Long, FsImageProto.INodeSection.INode> inodes =
- Maps.newHashMap();
- private static Map<Long, long[]> dirmap = Maps.newHashMap();
- private static List<FsImageProto.INodeReferenceSection.INodeReference>
- refList = Lists.newArrayList();
+ private final String[] stringTable;
+ // byte representation of inodes, sorted by id
+ private final byte[][] inodes;
+ private final Map<Long, long[]> dirmap;
+ private static final Comparator<byte[]> INODE_BYTES_COMPARATOR = new
+ Comparator<byte[]>() {
+ @Override
+ public int compare(byte[] o1, byte[] o2) {
+ try {
+ final FsImageProto.INodeSection.INode l = FsImageProto.INodeSection
+ .INode.parseFrom(o1);
+ final FsImageProto.INodeSection.INode r = FsImageProto.INodeSection
+ .INode.parseFrom(o2);
+ if (l.getId() < r.getId()) {
+ return -1;
+ } else if (l.getId() > r.getId()) {
+ return 1;
+ } else {
+ return 0;
+ }
+ } catch (InvalidProtocolBufferException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ };
- private FSImageLoader() {}
+ private FSImageLoader(String[] stringTable, byte[][] inodes,
+ Map<Long, long[]> dirmap) {
+ this.stringTable = stringTable;
+ this.inodes = inodes;
+ this.dirmap = dirmap;
+ }
/**
* Load fsimage into the memory.
@@ -79,7 +109,14 @@ class FSImageLoader {
FsImageProto.FileSummary summary = FSImageUtil.loadSummary(file);
FileInputStream fin = null;
+
try {
+ // Map to record INodeReference to the referred id
+ ImmutableList<Long> refIdList = null;
+ String[] stringTable = null;
+ byte[][] inodes = null;
+ Map<Long, long[]> dirmap = null;
+
fin = new FileInputStream(file.getFD());
ArrayList<FsImageProto.FileSummary.Section> sections =
@@ -109,34 +146,37 @@ class FSImageLoader {
summary.getCodec(), new BufferedInputStream(new LimitInputStream(
fin, s.getLength())));
+ LOG.debug("Loading section " + s.getName() + " length: " + s.getLength
+ ());
switch (FSImageFormatProtobuf.SectionName.fromString(s.getName())) {
case STRING_TABLE:
- loadStringTable(is);
+ stringTable = loadStringTable(is);
break;
case INODE:
- loadINodeSection(is);
+ inodes = loadINodeSection(is);
break;
case INODE_REFERENCE:
- loadINodeReferenceSection(is);
+ refIdList = loadINodeReferenceSection(is);
break;
case INODE_DIR:
- loadINodeDirectorySection(is);
+ dirmap = loadINodeDirectorySection(is, refIdList);
break;
default:
break;
}
}
+ return new FSImageLoader(stringTable, inodes, dirmap);
} finally {
IOUtils.cleanup(null, fin);
}
- return new FSImageLoader();
}
- private static void loadINodeDirectorySection(InputStream in)
+ private static Map<Long, long[]> loadINodeDirectorySection
+ (InputStream in, List<Long> refIdList)
throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Loading directory section");
- }
+ LOG.info("Loading inode directory section");
+ Map<Long, long[]> dirs = Maps.newHashMap();
+ long counter = 0;
while (true) {
FsImageProto.INodeDirectorySection.DirEntry e =
FsImageProto.INodeDirectorySection.DirEntry.parseDelimitedFrom(in);
@@ -144,31 +184,27 @@ class FSImageLoader {
if (e == null) {
break;
}
+ ++counter;
+
long[] l = new long[e.getChildrenCount() + e.getRefChildrenCount()];
for (int i = 0; i < e.getChildrenCount(); ++i) {
l[i] = e.getChildren(i);
}
for (int i = e.getChildrenCount(); i < l.length; i++) {
int refId = e.getRefChildren(i - e.getChildrenCount());
- l[i] = refList.get(refId).getReferredId();
- }
- dirmap.put(e.getParent(), l);
- if (LOG.isDebugEnabled()) {
- LOG.debug("Loaded directory (parent " + e.getParent()
- + ") with " + e.getChildrenCount() + " children and "
- + e.getRefChildrenCount() + " reference children");
+ l[i] = refIdList.get(refId);
}
+ dirs.put(e.getParent(), l);
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("Loaded " + dirmap.size() + " directories");
- }
+ LOG.info("Loaded " + counter + " directories");
+ return dirs;
}
- private static void loadINodeReferenceSection(InputStream in)
+ private static ImmutableList<Long> loadINodeReferenceSection(InputStream in)
throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Loading inode reference section");
- }
+ LOG.info("Loading inode references");
+ ImmutableList.Builder<Long> builder = ImmutableList.builder();
+ long counter = 0;
while (true) {
FsImageProto.INodeReferenceSection.INodeReference e =
FsImageProto.INodeReferenceSection.INodeReference
@@ -176,49 +212,44 @@ class FSImageLoader {
if (e == null) {
break;
}
- refList.add(e);
- if (LOG.isTraceEnabled()) {
- LOG.trace("Loaded inode reference named '" + e.getName()
- + "' referring to id " + e.getReferredId() + "");
- }
- }
- if (LOG.isDebugEnabled()) {
- LOG.debug("Loaded " + refList.size() + " inode references");
+ ++counter;
+ builder.add(e.getReferredId());
}
+ LOG.info("Loaded " + counter + " inode references");
+ return builder.build();
}
- private static void loadINodeSection(InputStream in) throws IOException {
+ private static byte[][] loadINodeSection(InputStream in)
+ throws IOException {
FsImageProto.INodeSection s = FsImageProto.INodeSection
.parseDelimitedFrom(in);
- if (LOG.isDebugEnabled()) {
- LOG.debug("Found " + s.getNumInodes() + " inodes in inode section");
- }
+ LOG.info("Loading " + s.getNumInodes() + " inodes.");
+ final byte[][] inodes = new byte[(int) s.getNumInodes()][];
+
for (int i = 0; i < s.getNumInodes(); ++i) {
- FsImageProto.INodeSection.INode p = FsImageProto.INodeSection.INode
- .parseDelimitedFrom(in);
- inodes.put(p.getId(), p);
- if (LOG.isTraceEnabled()) {
- LOG.trace("Loaded inode id " + p.getId() + " type " + p.getType()
- + " name '" + p.getName().toStringUtf8() + "'");
- }
+ int size = CodedInputStream.readRawVarint32(in.read(), in);
+ byte[] bytes = new byte[size];
+ IOUtils.readFully(in, bytes, 0, size);
+ inodes[i] = bytes;
}
+ LOG.debug("Sorting inodes");
+ Arrays.sort(inodes, INODE_BYTES_COMPARATOR);
+ LOG.debug("Finished sorting inodes");
+ return inodes;
}
- private static void loadStringTable(InputStream in) throws IOException {
+ private static String[] loadStringTable(InputStream in) throws
+ IOException {
FsImageProto.StringTableSection s = FsImageProto.StringTableSection
.parseDelimitedFrom(in);
- if (LOG.isDebugEnabled()) {
- LOG.debug("Found " + s.getNumEntry() + " strings in string section");
- }
- stringTable = new String[s.getNumEntry() + 1];
+ LOG.info("Loading " + s.getNumEntry() + " strings");
+ String[] stringTable = new String[s.getNumEntry() + 1];
for (int i = 0; i < s.getNumEntry(); ++i) {
FsImageProto.StringTableSection.Entry e = FsImageProto
.StringTableSection.Entry.parseDelimitedFrom(in);
stringTable[e.getId()] = e.getStr();
- if (LOG.isTraceEnabled()) {
- LOG.trace("Loaded string " + e.getStr());
- }
}
+ return stringTable;
}
/**
@@ -229,7 +260,7 @@ class FSImageLoader {
*/
String getFileStatus(String path) throws IOException {
ObjectMapper mapper = new ObjectMapper();
- FsImageProto.INodeSection.INode inode = inodes.get(getINodeId(path));
+ FsImageProto.INodeSection.INode inode = fromINodeId(lookup(path));
return "{\"FileStatus\":\n"
+ mapper.writeValueAsString(getFileStatus(inode, false)) + "\n}\n";
}
@@ -256,10 +287,11 @@ class FSImageLoader {
return sb.toString();
}
- private List<Map<String, Object>> getFileStatusList(String path) {
+ private List<Map<String, Object>> getFileStatusList(String path)
+ throws IOException {
List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
- long id = getINodeId(path);
- FsImageProto.INodeSection.INode inode = inodes.get(id);
+ long id = lookup(path);
+ FsImageProto.INodeSection.INode inode = fromINodeId(id);
if (inode.getType() == FsImageProto.INodeSection.INode.Type.DIRECTORY) {
if (!dirmap.containsKey(id)) {
// if the directory is empty, return empty list
@@ -267,7 +299,7 @@ class FSImageLoader {
}
long[] children = dirmap.get(id);
for (long cid : children) {
- list.add(getFileStatus(inodes.get(cid), true));
+ list.add(getFileStatus(fromINodeId(cid), true));
}
} else {
list.add(getFileStatus(inode, false));
@@ -305,9 +337,9 @@ class FSImageLoader {
return sb.toString();
}
- private List<AclEntry> getAclEntryList(String path) {
- long id = getINodeId(path);
- FsImageProto.INodeSection.INode inode = inodes.get(id);
+ private List<AclEntry> getAclEntryList(String path) throws IOException {
+ long id = lookup(path);
+ FsImageProto.INodeSection.INode inode = fromINodeId(id);
switch (inode.getType()) {
case FILE: {
FsImageProto.INodeSection.INodeFile f = inode.getFile();
@@ -325,9 +357,9 @@ class FSImageLoader {
}
}
- private PermissionStatus getPermissionStatus(String path) {
- long id = getINodeId(path);
- FsImageProto.INodeSection.INode inode = inodes.get(id);
+ private PermissionStatus getPermissionStatus(String path) throws IOException {
+ long id = lookup(path);
+ FsImageProto.INodeSection.INode inode = fromINodeId(id);
switch (inode.getType()) {
case FILE: {
FsImageProto.INodeSection.INodeFile f = inode.getFile();
@@ -353,30 +385,41 @@ class FSImageLoader {
/**
* Return the INodeId of the specified path.
*/
- private long getINodeId(String strPath) {
- if (strPath.equals("/")) {
- return INodeId.ROOT_INODE_ID;
- }
-
- String[] nameList = strPath.split("/");
- Preconditions.checkArgument(nameList.length > 1,
- "Illegal path: " + strPath);
+ private long lookup(String path) throws IOException {
+ Preconditions.checkArgument(path.startsWith("/"));
long id = INodeId.ROOT_INODE_ID;
- for (int i = 1; i < nameList.length; i++) {
- long[] children = dirmap.get(id);
- Preconditions.checkNotNull(children, "File: " +
- strPath + " is not found in the fsimage.");
- String cName = nameList[i];
- boolean findChildren = false;
+ for (int offset = 0, next; offset < path.length(); offset = next) {
+ next = path.indexOf('/', offset + 1);
+ if (next == -1) {
+ next = path.length();
+ }
+ if (offset + 1 > next) {
+ break;
+ }
+
+ final String component = path.substring(offset + 1, next);
+
+ if (component.isEmpty()) {
+ continue;
+ }
+
+ final long[] children = dirmap.get(id);
+ if (children == null) {
+ throw new FileNotFoundException(path);
+ }
+
+ boolean found = false;
for (long cid : children) {
- if (cName.equals(inodes.get(cid).getName().toStringUtf8())) {
- id = cid;
- findChildren = true;
+ FsImageProto.INodeSection.INode child = fromINodeId(cid);
+ if (component.equals(child.getName().toStringUtf8())) {
+ found = true;
+ id = child.getId();
break;
}
}
- Preconditions.checkArgument(findChildren, "File: " +
- strPath + " is not found in the fsimage.");
+ if (!found) {
+ throw new FileNotFoundException(path);
+ }
}
return id;
}
@@ -460,4 +503,23 @@ class FSImageLoader {
private String toString(FsPermission permission) {
return String.format("%o", permission.toShort());
}
+
+ private FsImageProto.INodeSection.INode fromINodeId(final long id)
+ throws IOException {
+ int l = 0, r = inodes.length;
+ while (l < r) {
+ int mid = l + (r - l) / 2;
+ FsImageProto.INodeSection.INode n = FsImageProto.INodeSection.INode
+ .parseFrom(inodes[mid]);
+ long nid = n.getId();
+ if (id > nid) {
+ l = mid + 1;
+ } else if (id < nid) {
+ r = mid;
+ } else {
+ return n;
+ }
+ }
+ return null;
+ }
}
[2/2] git commit: HDFS-6894. Add XDR parser method for each NFS
response. Contributed by Brandon Li.
Posted by wh...@apache.org.
HDFS-6894. Add XDR parser method for each NFS response. Contributed by Brandon Li.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5d2f3254
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5d2f3254
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5d2f3254
Branch: refs/heads/branch-2.6
Commit: 5d2f325483ae6589b60fbec6879894e80fd3dde2
Parents: 0eec4c1
Author: Haohui Mai <wh...@apache.org>
Authored: Wed Oct 1 13:18:56 2014 -0700
Committer: Haohui Mai <wh...@apache.org>
Committed: Wed Oct 1 13:22:12 2014 -0700
----------------------------------------------------------------------
.../nfs/nfs3/response/ACCESS3Response.java | 16 ++++++-
.../nfs/nfs3/response/COMMIT3Response.java | 14 +++++-
.../nfs/nfs3/response/CREATE3Response.java | 20 ++++++++-
.../nfs/nfs3/response/FSINFO3Response.java | 37 +++++++++++++++-
.../nfs/nfs3/response/FSSTAT3Response.java | 30 ++++++++++++-
.../nfs/nfs3/response/GETATTR3Response.java | 11 ++++-
.../hadoop/nfs/nfs3/response/LINK3Response.java | 14 +++++-
.../nfs/nfs3/response/LOOKUP3Response.java | 4 +-
.../nfs/nfs3/response/MKDIR3Response.java | 20 ++++++++-
.../nfs/nfs3/response/MKNOD3Response.java | 20 ++++++++-
.../hadoop/nfs/nfs3/response/NFS3Response.java | 4 +-
.../nfs/nfs3/response/PATHCONF3Response.java | 27 +++++++++++-
.../hadoop/nfs/nfs3/response/READ3Response.java | 24 ++++++++++-
.../nfs/nfs3/response/READDIR3Response.java | 45 +++++++++++++++++---
.../nfs/nfs3/response/READDIRPLUS3Response.java | 40 ++++++++++++++++-
.../nfs/nfs3/response/READLINK3Response.java | 17 +++++++-
.../nfs/nfs3/response/REMOVE3Response.java | 12 +++++-
.../nfs/nfs3/response/RENAME3Response.java | 11 ++++-
.../nfs/nfs3/response/RMDIR3Response.java | 10 ++++-
.../nfs/nfs3/response/SETATTR3Response.java | 10 ++++-
.../nfs/nfs3/response/SYMLINK3Response.java | 20 ++++++++-
.../nfs/nfs3/response/WRITE3Response.java | 22 +++++++++-
.../hadoop/nfs/nfs3/response/WccAttr.java | 7 +++
.../hadoop/nfs/nfs3/response/WccData.java | 8 ++++
.../hadoop/hdfs/nfs/nfs3/OpenFileCtx.java | 16 +++----
.../hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java | 4 +-
.../hadoop/hdfs/nfs/nfs3/WriteManager.java | 8 ++--
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 ++
28 files changed, 413 insertions(+), 61 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java
index 5720fc6..03c649c 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java
@@ -43,9 +43,21 @@ public class ACCESS3Response extends NFS3Response {
this.access = access;
}
+ public static ACCESS3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ Nfs3FileAttributes postOpAttr = null;
+ int access = 0;
+
+ if (status == Nfs3Status.NFS3_OK) {
+ postOpAttr = Nfs3FileAttributes.deserialize(xdr);
+ access = xdr.readInt();
+ }
+ return new ACCESS3Response(status, postOpAttr, access);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
if (this.getStatus() == Nfs3Status.NFS3_OK) {
out.writeBoolean(true);
postOpAttr.serialize(out);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/COMMIT3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/COMMIT3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/COMMIT3Response.java
index fd90b18..a2cb222 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/COMMIT3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/COMMIT3Response.java
@@ -47,9 +47,19 @@ public class COMMIT3Response extends NFS3Response {
return verf;
}
+ public static COMMIT3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ long verf = 0;
+ WccData fileWcc = WccData.deserialize(xdr);
+ if (status == Nfs3Status.NFS3_OK) {
+ verf = xdr.readHyper();
+ }
+ return new COMMIT3Response(status, fileWcc, verf);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
fileWcc.serialize(out);
if (getStatus() == Nfs3Status.NFS3_OK) {
out.writeLongAsHyper(verf);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/CREATE3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/CREATE3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/CREATE3Response.java
index 593cd82..0b54187 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/CREATE3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/CREATE3Response.java
@@ -55,9 +55,25 @@ public class CREATE3Response extends NFS3Response {
return dirWcc;
}
+ public static CREATE3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ FileHandle objHandle = new FileHandle();
+ Nfs3FileAttributes postOpObjAttr = null;
+
+ if (status == Nfs3Status.NFS3_OK) {
+ xdr.readBoolean();
+ objHandle.deserialize(xdr);
+ xdr.readBoolean();
+ postOpObjAttr = Nfs3FileAttributes.deserialize(xdr);
+ }
+
+ WccData dirWcc = WccData.deserialize(xdr);
+ return new CREATE3Response(status, objHandle, postOpObjAttr, dirWcc);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
if (getStatus() == Nfs3Status.NFS3_OK) {
out.writeBoolean(true); // Handle follows
objHandle.serialize(out);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSINFO3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSINFO3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSINFO3Response.java
index ed301ac..ebd54fe 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSINFO3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSINFO3Response.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.nfs.nfs3.response;
import org.apache.hadoop.nfs.NfsTime;
+import org.apache.hadoop.nfs.nfs3.FileHandle;
import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
import org.apache.hadoop.nfs.nfs3.Nfs3Status;
import org.apache.hadoop.oncrpc.XDR;
@@ -109,9 +110,41 @@ public class FSINFO3Response extends NFS3Response {
this.properties = properties;
}
+ public static FSINFO3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ xdr.readBoolean();
+ Nfs3FileAttributes postOpObjAttr = Nfs3FileAttributes.deserialize(xdr);
+ int rtmax = 0;
+ int rtpref = 0;
+ int rtmult = 0;
+ int wtmax = 0;
+ int wtpref = 0;
+ int wtmult = 0;
+ int dtpref = 0;
+ long maxFileSize = 0;
+ NfsTime timeDelta = null;
+ int properties = 0;
+
+ if (status == Nfs3Status.NFS3_OK) {
+ rtmax = xdr.readInt();
+ rtpref = xdr.readInt();
+ rtmult = xdr.readInt();
+ wtmax = xdr.readInt();
+ wtpref = xdr.readInt();
+ wtmult = xdr.readInt();
+ dtpref = xdr.readInt();
+ maxFileSize = xdr.readHyper();
+ timeDelta = NfsTime.deserialize(xdr);
+ properties = xdr.readInt();
+ }
+ return new FSINFO3Response(status, postOpObjAttr, rtmax, rtpref, rtmult,
+ wtmax, wtpref, wtmult, dtpref, maxFileSize, timeDelta, properties);
+
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
out.writeBoolean(true);
postOpAttr.serialize(out);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSSTAT3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSSTAT3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSSTAT3Response.java
index 0dd3f73..c0d1a8a 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSSTAT3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSSTAT3Response.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.nfs.nfs3.response;
+import org.apache.hadoop.nfs.nfs3.FileHandle;
import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
import org.apache.hadoop.nfs.nfs3.Nfs3Status;
import org.apache.hadoop.oncrpc.XDR;
@@ -90,9 +91,34 @@ public class FSSTAT3Response extends NFS3Response {
this.invarsec = invarsec;
}
+ public static FSSTAT3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ xdr.readBoolean();
+ Nfs3FileAttributes postOpAttr = Nfs3FileAttributes.deserialize(xdr);
+ long tbytes = 0;
+ long fbytes = 0;
+ long abytes = 0;
+ long tfiles = 0;
+ long ffiles = 0;
+ long afiles = 0;
+ int invarsec = 0;
+
+ if (status == Nfs3Status.NFS3_OK) {
+ tbytes = xdr.readHyper();
+ fbytes = xdr.readHyper();
+ abytes = xdr.readHyper();
+ tfiles = xdr.readHyper();
+ ffiles = xdr.readHyper();
+ afiles = xdr.readHyper();
+ invarsec = xdr.readInt();
+ }
+ return new FSSTAT3Response(status, postOpAttr, tbytes, fbytes, abytes,
+ tfiles, ffiles, afiles, invarsec);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
out.writeBoolean(true);
if (postOpAttr == null) {
postOpAttr = new Nfs3FileAttributes();
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/GETATTR3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/GETATTR3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/GETATTR3Response.java
index b9bb3f0..092abbc 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/GETATTR3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/GETATTR3Response.java
@@ -40,9 +40,16 @@ public class GETATTR3Response extends NFS3Response {
this.postOpAttr = postOpAttr;
}
+ public static GETATTR3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ Nfs3FileAttributes attr = (status == Nfs3Status.NFS3_OK) ? Nfs3FileAttributes
+ .deserialize(xdr) : new Nfs3FileAttributes();
+ return new GETATTR3Response(status, attr);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
if (getStatus() == Nfs3Status.NFS3_OK) {
postOpAttr.serialize(out);
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LINK3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LINK3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LINK3Response.java
index 834ee3c..3893aa1 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LINK3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LINK3Response.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.nfs.nfs3.response;
+import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
+import org.apache.hadoop.nfs.nfs3.Nfs3Status;
import org.apache.hadoop.oncrpc.XDR;
import org.apache.hadoop.oncrpc.security.Verifier;
@@ -43,9 +45,17 @@ public class LINK3Response extends NFS3Response {
return linkDirWcc;
}
+ public static LINK3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ WccData fromDirWcc = WccData.deserialize(xdr);
+ WccData linkDirWcc = WccData.deserialize(xdr);
+ return new LINK3Response(status, fromDirWcc, linkDirWcc);
+ }
+
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
fromDirWcc.serialize(out);
linkDirWcc.serialize(out);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LOOKUP3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LOOKUP3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LOOKUP3Response.java
index 5d33f98..9aa86b9 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LOOKUP3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LOOKUP3Response.java
@@ -62,8 +62,8 @@ public class LOOKUP3Response extends NFS3Response {
}
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
if (this.status == Nfs3Status.NFS3_OK) {
fileHandle.serialize(out);
out.writeBoolean(true); // Attribute follows
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKDIR3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKDIR3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKDIR3Response.java
index 0e0980a..a07a3da 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKDIR3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKDIR3Response.java
@@ -55,9 +55,25 @@ public class MKDIR3Response extends NFS3Response {
return dirWcc;
}
+ public static MKDIR3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ FileHandle objFileHandle = new FileHandle();
+ Nfs3FileAttributes objAttr = null;
+ WccData dirWcc;
+
+ if (status == Nfs3Status.NFS3_OK) {
+ xdr.readBoolean();
+ objFileHandle.deserialize(xdr);
+ xdr.readBoolean();
+ objAttr = Nfs3FileAttributes.deserialize(xdr);
+ }
+ dirWcc = WccData.deserialize(xdr);
+ return new MKDIR3Response(status, objFileHandle, objAttr, dirWcc);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
if (getStatus() == Nfs3Status.NFS3_OK) {
out.writeBoolean(true); // Handle follows
objFileHandle.serialize(out);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKNOD3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKNOD3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKNOD3Response.java
index 292094e..57faa55 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKNOD3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKNOD3Response.java
@@ -52,9 +52,25 @@ public class MKNOD3Response extends NFS3Response {
return dirWcc;
}
+ public static MKNOD3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ FileHandle objFileHandle = new FileHandle();
+ Nfs3FileAttributes objPostOpAttr = null;
+ WccData dirWcc;
+
+ if (status == Nfs3Status.NFS3_OK) {
+ xdr.readBoolean();
+ objFileHandle.deserialize(xdr);
+ xdr.readBoolean();
+ objPostOpAttr = Nfs3FileAttributes.deserialize(xdr);
+ }
+ dirWcc = WccData.deserialize(xdr);
+ return new MKNOD3Response(status, objFileHandle, objPostOpAttr, dirWcc);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
if (this.getStatus() == Nfs3Status.NFS3_OK) {
out.writeBoolean(true);
objFileHandle.serialize(out);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/NFS3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/NFS3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/NFS3Response.java
index e30af82..2a2a1e4 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/NFS3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/NFS3Response.java
@@ -39,12 +39,12 @@ public class NFS3Response {
public void setStatus(int status) {
this.status = status;
}
-
+
/**
* Write the response, along with the rpc header (including verifier), to the
* XDR.
*/
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
RpcAcceptedReply reply = RpcAcceptedReply.getAcceptInstance(xid, verifier);
reply.write(out);
out.writeInt(this.getStatus());
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/PATHCONF3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/PATHCONF3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/PATHCONF3Response.java
index e4578a0..cb5f31c 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/PATHCONF3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/PATHCONF3Response.java
@@ -77,9 +77,32 @@ public class PATHCONF3Response extends NFS3Response {
this.casePreserving = casePreserving;
}
+ public static PATHCONF3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ xdr.readBoolean();
+ Nfs3FileAttributes objPostOpAttr = Nfs3FileAttributes.deserialize(xdr);
+ int linkMax = 0;
+ int nameMax = 0;
+ boolean noTrunc = false;
+ boolean chownRestricted = false;
+ boolean caseInsensitive = false;
+ boolean casePreserving = false;
+
+ if (status == Nfs3Status.NFS3_OK) {
+ linkMax = xdr.readInt();
+ nameMax = xdr.readInt();
+ noTrunc = xdr.readBoolean();
+ chownRestricted = xdr.readBoolean();
+ caseInsensitive = xdr.readBoolean();
+ casePreserving = xdr.readBoolean();
+ }
+ return new PATHCONF3Response(status, objPostOpAttr, linkMax, nameMax,
+ noTrunc, chownRestricted, caseInsensitive, casePreserving);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
out.writeBoolean(true);
postOpAttr.serialize(out);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READ3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READ3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READ3Response.java
index 2524ca0..05eef5b 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READ3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READ3Response.java
@@ -62,9 +62,29 @@ public class READ3Response extends NFS3Response {
return data;
}
+ public static READ3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ xdr.readBoolean();
+ Nfs3FileAttributes postOpAttr = Nfs3FileAttributes.deserialize(xdr);
+ int count = 0;
+ boolean eof = false;
+ byte[] data = new byte[0];
+
+ if (status == Nfs3Status.NFS3_OK) {
+ count = xdr.readInt();
+ eof = xdr.readBoolean();
+ int len = xdr.readInt();
+ assert (len == count);
+ data = xdr.readFixedOpaque(count);
+ }
+
+ return new READ3Response(status, postOpAttr, count, eof,
+ ByteBuffer.wrap(data));
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
out.writeBoolean(true); // Attribute follows
postOpAttr.serialize(out);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIR3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIR3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIR3Response.java
index e0a0d96..5bde2c0 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIR3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIR3Response.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.nfs.nfs3.response;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -59,6 +61,19 @@ public class READDIR3Response extends NFS3Response {
long getCookie() {
return cookie;
}
+
+ static Entry3 deserialize(XDR xdr) {
+ long fileId = xdr.readHyper();
+ String name = xdr.readString();
+ long cookie = xdr.readHyper();
+ return new Entry3(fileId, name, cookie);
+ }
+
+ void serialize(XDR xdr) {
+ xdr.writeLongAsHyper(getFileId());
+ xdr.writeString(getName());
+ xdr.writeLongAsHyper(getCookie());
+ }
}
public static class DirList3 {
@@ -104,9 +119,31 @@ public class READDIR3Response extends NFS3Response {
return dirList;
}
+ public static READDIR3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ xdr.readBoolean();
+ Nfs3FileAttributes postOpDirAttr = Nfs3FileAttributes.deserialize(xdr);
+ long cookieVerf = 0;
+ ArrayList<Entry3> entries = new ArrayList<Entry3>();
+ DirList3 dirList = null;
+
+ if (status == Nfs3Status.NFS3_OK) {
+ cookieVerf = xdr.readHyper();
+ while (xdr.readBoolean()) {
+ Entry3 e = Entry3.deserialize(xdr);
+ entries.add(e);
+ }
+ boolean eof = xdr.readBoolean();
+ Entry3[] allEntries = new Entry3[entries.size()];
+ entries.toArray(allEntries);
+ dirList = new DirList3(allEntries, eof);
+ }
+ return new READDIR3Response(status, postOpDirAttr, cookieVerf, dirList);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR xdr, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(xdr, xid, verifier);
+ public XDR serialize(XDR xdr, int xid, Verifier verifier) {
+ super.serialize(xdr, xid, verifier);
xdr.writeBoolean(true); // Attributes follow
postOpDirAttr.serialize(xdr);
@@ -114,9 +151,7 @@ public class READDIR3Response extends NFS3Response {
xdr.writeLongAsHyper(cookieVerf);
for (Entry3 e : dirList.entries) {
xdr.writeBoolean(true); // Value follows
- xdr.writeLongAsHyper(e.getFileId());
- xdr.writeString(e.getName());
- xdr.writeLongAsHyper(e.getCookie());
+ e.serialize(xdr);
}
xdr.writeBoolean(false);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIRPLUS3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIRPLUS3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIRPLUS3Response.java
index eeda867..cf32bd1 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIRPLUS3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIRPLUS3Response.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.nfs.nfs3.response;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -24,6 +25,8 @@ import java.util.List;
import org.apache.hadoop.nfs.nfs3.FileHandle;
import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
import org.apache.hadoop.nfs.nfs3.Nfs3Status;
+import org.apache.hadoop.nfs.nfs3.response.READDIR3Response.DirList3;
+import org.apache.hadoop.nfs.nfs3.response.READDIR3Response.Entry3;
import org.apache.hadoop.oncrpc.XDR;
import org.apache.hadoop.oncrpc.security.Verifier;
@@ -58,6 +61,17 @@ public class READDIRPLUS3Response extends NFS3Response {
return name;
}
+ static EntryPlus3 deserialize(XDR xdr) {
+ long fileId = xdr.readHyper();
+ String name = xdr.readString();
+ long cookie = xdr.readHyper();
+ xdr.readBoolean();
+ Nfs3FileAttributes nameAttr = Nfs3FileAttributes.deserialize(xdr);
+ FileHandle objFileHandle = new FileHandle();
+ objFileHandle.deserialize(xdr);
+ return new EntryPlus3(fileId, name, cookie, nameAttr, objFileHandle);
+ }
+
void seralize(XDR xdr) {
xdr.writeLongAsHyper(fileId);
xdr.writeString(name);
@@ -105,9 +119,31 @@ public class READDIRPLUS3Response extends NFS3Response {
this.dirListPlus = dirListPlus;
}
+ public static READDIRPLUS3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ xdr.readBoolean();
+ Nfs3FileAttributes postOpDirAttr = Nfs3FileAttributes.deserialize(xdr);
+ long cookieVerf = 0;
+ ArrayList<EntryPlus3> entries = new ArrayList<EntryPlus3>();
+ DirListPlus3 dirList = null;
+
+ if (status == Nfs3Status.NFS3_OK) {
+ cookieVerf = xdr.readHyper();
+ while (xdr.readBoolean()) {
+ EntryPlus3 e = EntryPlus3.deserialize(xdr);
+ entries.add(e);
+ }
+ boolean eof = xdr.readBoolean();
+ EntryPlus3[] allEntries = new EntryPlus3[entries.size()];
+ entries.toArray(allEntries);
+ dirList = new DirListPlus3(allEntries, eof);
+ }
+ return new READDIRPLUS3Response(status, postOpDirAttr, cookieVerf, dirList);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
out.writeBoolean(true); // attributes follow
if (postOpDirAttr == null) {
postOpDirAttr = new Nfs3FileAttributes();
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READLINK3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READLINK3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READLINK3Response.java
index fe54296..9041d4a 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READLINK3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READLINK3Response.java
@@ -41,9 +41,22 @@ public class READLINK3Response extends NFS3Response {
System.arraycopy(path, 0, this.path, 0, path.length);
}
+ public static READLINK3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ xdr.readBoolean();
+ Nfs3FileAttributes postOpSymlinkAttr = Nfs3FileAttributes.deserialize(xdr);
+ byte path[] = new byte[0];
+
+ if (status == Nfs3Status.NFS3_OK) {
+ path = xdr.readVariableOpaque();
+ }
+
+ return new READLINK3Response(status, postOpSymlinkAttr, path);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
out.writeBoolean(true); // Attribute follows
postOpSymlinkAttr.serialize(out);
if (getStatus() == Nfs3Status.NFS3_OK) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/REMOVE3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/REMOVE3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/REMOVE3Response.java
index 55b880b..f0fcb3d 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/REMOVE3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/REMOVE3Response.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.nfs.nfs3.response;
+import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
+import org.apache.hadoop.nfs.nfs3.Nfs3Status;
import org.apache.hadoop.oncrpc.XDR;
import org.apache.hadoop.oncrpc.security.Verifier;
@@ -35,9 +37,15 @@ public class REMOVE3Response extends NFS3Response {
this.dirWcc = dirWcc;
}
+ public static REMOVE3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ WccData dirWcc = WccData.deserialize(xdr);
+ return new REMOVE3Response(status, dirWcc);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
if (dirWcc == null) {
dirWcc = new WccData(null, null);
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RENAME3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RENAME3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RENAME3Response.java
index 4e0f4c0..3a8d68a 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RENAME3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RENAME3Response.java
@@ -45,9 +45,16 @@ public class RENAME3Response extends NFS3Response {
return toDirWcc;
}
+ public static RENAME3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ WccData fromDirWcc = WccData.deserialize(xdr);
+ WccData toDirWcc = WccData.deserialize(xdr);
+ return new RENAME3Response(status, fromDirWcc, toDirWcc);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
fromDirWcc.serialize(out);
toDirWcc.serialize(out);
return out;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RMDIR3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RMDIR3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RMDIR3Response.java
index e7b0664..229ab60 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RMDIR3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RMDIR3Response.java
@@ -39,9 +39,15 @@ public class RMDIR3Response extends NFS3Response {
return dirWcc;
}
+ public static RMDIR3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ WccData dirWcc = WccData.deserialize(xdr);
+ return new RMDIR3Response(status, dirWcc);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
dirWcc.serialize(out);
return out;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SETATTR3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SETATTR3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SETATTR3Response.java
index eda06d4..aa6c934 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SETATTR3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SETATTR3Response.java
@@ -39,9 +39,15 @@ public class SETATTR3Response extends NFS3Response {
return wccData;
}
+ public static SETATTR3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ WccData wccData = WccData.deserialize(xdr);
+ return new SETATTR3Response(status, wccData);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
wccData.serialize(out);
return out;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SYMLINK3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SYMLINK3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SYMLINK3Response.java
index d8e3441..3a0c188 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SYMLINK3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SYMLINK3Response.java
@@ -55,9 +55,25 @@ public class SYMLINK3Response extends NFS3Response {
return dirWcc;
}
+ public static SYMLINK3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ FileHandle objFileHandle = new FileHandle();
+ Nfs3FileAttributes objPostOpAttr = null;
+ WccData dirWcc;
+ if (status == Nfs3Status.NFS3_OK) {
+ xdr.readBoolean();
+ objFileHandle.deserialize(xdr);
+ xdr.readBoolean();
+ objPostOpAttr = Nfs3FileAttributes.deserialize(xdr);
+ }
+
+ dirWcc = WccData.deserialize(xdr);
+ return new SYMLINK3Response(status, objFileHandle, objPostOpAttr, dirWcc);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
if (this.getStatus() == Nfs3Status.NFS3_OK) {
out.writeBoolean(true);
objFileHandle.serialize(out);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WRITE3Response.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WRITE3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WRITE3Response.java
index f33c4de..8d4b4d9 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WRITE3Response.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WRITE3Response.java
@@ -17,7 +17,9 @@
*/
package org.apache.hadoop.nfs.nfs3.response;
+import org.apache.hadoop.nfs.nfs3.FileHandle;
import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
+import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
import org.apache.hadoop.nfs.nfs3.Nfs3Status;
import org.apache.hadoop.nfs.nfs3.Nfs3Constant.WriteStableHow;
import org.apache.hadoop.oncrpc.XDR;
@@ -58,9 +60,25 @@ public class WRITE3Response extends NFS3Response {
return verifer;
}
+ public static WRITE3Response deserialize(XDR xdr) {
+ int status = xdr.readInt();
+ WccData fileWcc = WccData.deserialize(xdr);
+ int count = 0;
+ WriteStableHow stableHow = null;
+ long verifier = 0;
+
+ if (status == Nfs3Status.NFS3_OK) {
+ count = xdr.readInt();
+ int how = xdr.readInt();
+ stableHow = WriteStableHow.values()[how];
+ verifier = xdr.readHyper();
+ }
+ return new WRITE3Response(status, fileWcc, count, stableHow, verifier);
+ }
+
@Override
- public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
- super.writeHeaderAndResponse(out, xid, verifier);
+ public XDR serialize(XDR out, int xid, Verifier verifier) {
+ super.serialize(out, xid, verifier);
fileWcc.serialize(out);
if (getStatus() == Nfs3Status.NFS3_OK) {
out.writeInt(count);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WccAttr.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WccAttr.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WccAttr.java
index bf33f79..34df29a 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WccAttr.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WccAttr.java
@@ -52,6 +52,13 @@ public class WccAttr {
this.ctime = ctime;
}
+ public static WccAttr deserialize(XDR xdr) {
+ long size = xdr.readHyper();
+ NfsTime mtime = NfsTime.deserialize(xdr);
+ NfsTime ctime = NfsTime.deserialize(xdr);
+ return new WccAttr(size, mtime, ctime);
+ }
+
public void serialize(XDR out) {
out.writeLongAsHyper(size);
if (mtime == null) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WccData.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WccData.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WccData.java
index 4a00a84..c868213 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WccData.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WccData.java
@@ -49,6 +49,14 @@ public class WccData {
: postOpAttr;
}
+ public static WccData deserialize(XDR xdr) {
+ xdr.readBoolean();
+ WccAttr preOpAttr = WccAttr.deserialize(xdr);
+ xdr.readBoolean();
+ Nfs3FileAttributes postOpAttr = Nfs3FileAttributes.deserialize(xdr);
+ return new WccData(preOpAttr, postOpAttr);
+ }
+
public void serialize(XDR out) {
out.writeBoolean(true); // attributes follow
preOpAttr.serialize(out);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java
index 6a5368c..dc2f1b3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java
@@ -409,7 +409,7 @@ class OpenFileCtx {
WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO,
fileWcc, 0, request.getStableHow(), Nfs3Constant.WRITE_COMMIT_VERF);
Nfs3Utils.writeChannel(channel,
- response.writeHeaderAndResponse(new XDR(), xid, new VerifierNone()),
+ response.serialize(new XDR(), xid, new VerifierNone()),
xid);
} else {
// Update the write time first
@@ -435,7 +435,7 @@ class OpenFileCtx {
WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK,
fileWcc, request.getCount(), request.getStableHow(),
Nfs3Constant.WRITE_COMMIT_VERF);
- Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+ Nfs3Utils.writeChannel(channel, response.serialize(
new XDR(), xid, new VerifierNone()), xid);
}
} else {
@@ -572,7 +572,7 @@ class OpenFileCtx {
}
updateLastAccessTime();
Nfs3Utils.writeChannel(channel,
- response.writeHeaderAndResponse(new XDR(), xid, new VerifierNone()),
+ response.serialize(new XDR(), xid, new VerifierNone()),
xid);
}
@@ -644,7 +644,7 @@ class OpenFileCtx {
WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK,
fileWcc, count, stableHow, Nfs3Constant.WRITE_COMMIT_VERF);
Nfs3Utils
- .writeChannel(channel, response.writeHeaderAndResponse(new XDR(),
+ .writeChannel(channel, response.serialize(new XDR(),
xid, new VerifierNone()), xid);
writeCtx.setReplied(true);
}
@@ -1026,7 +1026,7 @@ class OpenFileCtx {
COMMIT3Response response = new COMMIT3Response(status, wccData,
Nfs3Constant.WRITE_COMMIT_VERF);
Nfs3Utils.writeChannelCommit(commit.getChannel(), response
- .writeHeaderAndResponse(new XDR(), commit.getXid(),
+ .serialize(new XDR(), commit.getXid(),
new VerifierNone()), commit.getXid());
if (LOG.isDebugEnabled()) {
@@ -1107,7 +1107,7 @@ class OpenFileCtx {
}
WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK,
fileWcc, count, stableHow, Nfs3Constant.WRITE_COMMIT_VERF);
- Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+ Nfs3Utils.writeChannel(channel, response.serialize(
new XDR(), xid, new VerifierNone()), xid);
}
@@ -1119,7 +1119,7 @@ class OpenFileCtx {
+ offset + " and length " + count, e);
if (!writeCtx.getReplied()) {
WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO);
- Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+ Nfs3Utils.writeChannel(channel, response.serialize(
new XDR(), xid, new VerifierNone()), xid);
// Keep stream open. Either client retries or SteamMonitor closes it.
}
@@ -1170,7 +1170,7 @@ class OpenFileCtx {
WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO,
fileWcc, 0, writeCtx.getStableHow(), Nfs3Constant.WRITE_COMMIT_VERF);
Nfs3Utils.writeChannel(writeCtx.getChannel(), response
- .writeHeaderAndResponse(new XDR(), writeCtx.getXid(),
+ .serialize(new XDR(), writeCtx.getXid(),
new VerifierNone()), writeCtx.getXid());
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
index 70c37d8..6012b9b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
@@ -2025,7 +2025,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
WccData fileWcc = new WccData(Nfs3Utils.getWccAttr(preOpAttr), postOpAttr);
int status = mapErrorStatus(e);
return new COMMIT3Response(status, fileWcc,
- Nfs3Constant.WRITE_COMMIT_VERF);
+ Nfs3Constant.WRITE_COMMIT_VERF);
}
}
@@ -2163,7 +2163,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
return;
}
// TODO: currently we just return VerifierNone
- out = response.writeHeaderAndResponse(out, xid, new VerifierNone());
+ out = response.serialize(out, xid, new VerifierNone());
ChannelBuffer buf = ChannelBuffers.wrappedBuffer(out.asReadOnlyWrap()
.buffer());
RpcResponse rsp = new RpcResponse(buf, info.remoteAddress());
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/WriteManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/WriteManager.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/WriteManager.java
index 5f2ded7..7bddc44 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/WriteManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/WriteManager.java
@@ -123,7 +123,7 @@ public class WriteManager {
byte[] data = request.getData().array();
if (data.length < count) {
WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_INVAL);
- Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+ Nfs3Utils.writeChannel(channel, response.serialize(
new XDR(), xid, new VerifierNone()), xid);
return;
}
@@ -169,7 +169,7 @@ public class WriteManager {
WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO,
fileWcc, count, request.getStableHow(),
Nfs3Constant.WRITE_COMMIT_VERF);
- Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+ Nfs3Utils.writeChannel(channel, response.serialize(
new XDR(), xid, new VerifierNone()), xid);
return;
}
@@ -192,7 +192,7 @@ public class WriteManager {
WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_JUKEBOX,
fileWcc, 0, request.getStableHow(), Nfs3Constant.WRITE_COMMIT_VERF);
Nfs3Utils.writeChannel(channel,
- response.writeHeaderAndResponse(new XDR(), xid, new VerifierNone()),
+ response.serialize(new XDR(), xid, new VerifierNone()),
xid);
return;
}
@@ -297,7 +297,7 @@ public class WriteManager {
COMMIT3Response response = new COMMIT3Response(status, fileWcc,
Nfs3Constant.WRITE_COMMIT_VERF);
Nfs3Utils.writeChannelCommit(channel,
- response.writeHeaderAndResponse(new XDR(), xid, new VerifierNone()),
+ response.serialize(new XDR(), xid, new VerifierNone()),
xid);
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d2f3254/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 74e2e95..6546a96 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -275,6 +275,9 @@ Release 2.6.0 - UNRELEASED
HDFS-7158. Reduce the memory usage of WebImageViewer. (wheat9)
+ HDFS-6894. Add XDR parser method for each NFS response.
+ (Brandon Li via wheat9)
+
OPTIMIZATIONS
HDFS-6690. Deduplicate xattr names in memory. (wang)