You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by su...@apache.org on 2013/03/05 05:50:29 UTC
svn commit: r1452666 - in
/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common:
./ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/fs/
src/main/java/org/apache/hadoop/ipc/ src/main/proto/ src/test/core/
src/test/ja...
Author: suresh
Date: Tue Mar 5 04:50:28 2013
New Revision: 1452666
URL: http://svn.apache.org/r1452666
Log:
Merge trunk to branch-trunk-win
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt (contents, props changed)
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/ (props changed)
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/ (props changed)
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/ (props changed)
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1452666&r1=1452665&r2=1452666&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt Tue Mar 5 04:50:28 2013
@@ -153,6 +153,9 @@ Trunk (Unreleased)
HADOOP-9112. test-patch should -1 for @Tests without a timeout
(Surenkumar Nihalani via bobby)
+ HADOOP-9163 The rpc msg in ProtobufRpcEngine.proto should be moved out to
+ avoid an extra copy (Sanjay Radia)
+
BUG FIXES
HADOOP-8419. Fixed GzipCode NPE reset for IBM JDK. (Yu Li via eyang)
@@ -403,6 +406,9 @@ Release 2.0.4-beta - UNRELEASED
HADOOP-9349. Confusing output when running hadoop version from one hadoop
installation when HADOOP_HOME points to another. (sandyr via tucu)
+ HADOOP-9337. org.apache.hadoop.fs.DF.getMount() does not work on Mac OS.
+ (Ivan A. Veselovsky via atm)
+
Release 2.0.3-alpha - 2013-02-06
INCOMPATIBLE CHANGES
Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1452450-1452659
Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1452450-1452659
Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1452450-1452659
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java?rev=1452666&r1=1452665&r2=1452666&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java Tue Mar 5 04:50:28 2013
@@ -179,7 +179,8 @@ public class DF extends Shell {
protected String[] getExecString() {
// ignoring the error since the exit code is enough
return (WINDOWS)? new String[]{"cmd", "/c", "df -k " + dirPath + " 2>nul"}:
- new String[] {"bash","-c","exec 'df' '-k' '" + dirPath + "' 2>/dev/null"};
+ new String[] {"bash","-c","exec 'df' '-k' '-P' '" + dirPath
+ + "' 2>/dev/null"};
}
@Override
@@ -222,28 +223,11 @@ public class DF extends Shell {
}
try {
- switch(getOSType()) {
- case OS_TYPE_AIX:
- Long.parseLong(tokens.nextToken()); // capacity
- Long.parseLong(tokens.nextToken()); // available
- Integer.parseInt(tokens.nextToken()); // pct used
- tokens.nextToken();
- tokens.nextToken();
- this.mount = tokens.nextToken();
- break;
-
- case OS_TYPE_WIN:
- case OS_TYPE_SOLARIS:
- case OS_TYPE_MAC:
- case OS_TYPE_UNIX:
- default:
- Long.parseLong(tokens.nextToken()); // capacity
- Long.parseLong(tokens.nextToken()); // used
- Long.parseLong(tokens.nextToken()); // available
- Integer.parseInt(tokens.nextToken()); // pct used
- this.mount = tokens.nextToken();
- break;
- }
+ Long.parseLong(tokens.nextToken()); // capacity
+ Long.parseLong(tokens.nextToken()); // used
+ Long.parseLong(tokens.nextToken()); // available
+ Integer.parseInt(tokens.nextToken()); // pct used
+ this.mount = tokens.nextToken();
} catch (NoSuchElementException e) {
throw new IOException("Could not parse line: " + line);
} catch (NumberFormatException e) {
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1452666&r1=1452665&r2=1452666&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java Tue Mar 5 04:50:28 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.ipc;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
+import java.io.OutputStream;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.net.InetSocketAddress;
@@ -39,7 +40,7 @@ import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.ipc.Client.ConnectionId;
import org.apache.hadoop.ipc.RPC.RpcInvoker;
-import org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestProto;
+import org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.TokenIdentifier;
@@ -128,25 +129,12 @@ public class ProtobufRpcEngine implement
.getProtocolVersion(protocol);
}
- private RequestProto constructRpcRequest(Method method,
- Object[] params) throws ServiceException {
- RequestProto rpcRequest;
- RequestProto.Builder builder = RequestProto
+ private RequestHeaderProto constructRpcRequestHeader(Method method) {
+ RequestHeaderProto.Builder builder = RequestHeaderProto
.newBuilder();
builder.setMethodName(method.getName());
+
- if (params.length != 2) { // RpcController + Message
- throw new ServiceException("Too many parameters for request. Method: ["
- + method.getName() + "]" + ", Expected: 2, Actual: "
- + params.length);
- }
- if (params[1] == null) {
- throw new ServiceException("null param while calling Method: ["
- + method.getName() + "]");
- }
-
- Message param = (Message) params[1];
- builder.setRequest(param.toByteString());
// For protobuf, {@code protocol} used when creating client side proxy is
// the interface extending BlockingInterface, which has the annotations
// such as ProtocolName etc.
@@ -160,8 +148,7 @@ public class ProtobufRpcEngine implement
// For PB this may limit the use of mixins on client side.
builder.setDeclaringClassProtocolName(protocolName);
builder.setClientProtocolVersion(clientProtocolVersion);
- rpcRequest = builder.build();
- return rpcRequest;
+ return builder.build();
}
/**
@@ -189,8 +176,18 @@ public class ProtobufRpcEngine implement
if (LOG.isDebugEnabled()) {
startTime = Time.now();
}
+
+ if (args.length != 2) { // RpcController + Message
+ throw new ServiceException("Too many parameters for request. Method: ["
+ + method.getName() + "]" + ", Expected: 2, Actual: "
+ + args.length);
+ }
+ if (args[1] == null) {
+ throw new ServiceException("null param while calling Method: ["
+ + method.getName() + "]");
+ }
- RequestProto rpcRequest = constructRpcRequest(method, args);
+ RequestHeaderProto rpcRequestHeader = constructRpcRequestHeader(method);
RpcResponseWrapper val = null;
if (LOG.isTraceEnabled()) {
@@ -198,9 +195,12 @@ public class ProtobufRpcEngine implement
remoteId + ": " + method.getName() +
" {" + TextFormat.shortDebugString((Message) args[1]) + "}");
}
+
+
+ Message theRequest = (Message) args[1];
try {
val = (RpcResponseWrapper) client.call(RPC.RpcKind.RPC_PROTOCOL_BUFFER,
- new RpcRequestWrapper(rpcRequest), remoteId);
+ new RpcRequestWrapper(rpcRequestHeader, theRequest), remoteId);
} catch (Throwable e) {
if (LOG.isTraceEnabled()) {
@@ -275,20 +275,25 @@ public class ProtobufRpcEngine implement
* use type Writable as a wrapper to work across multiple RpcEngine kinds.
*/
private static class RpcRequestWrapper implements Writable {
- RequestProto message;
+ RequestHeaderProto requestHeader;
+ Message theRequest; // for clientSide, the request is here
+ byte[] theRequestRead; // for server side, the request is here
@SuppressWarnings("unused")
public RpcRequestWrapper() {
}
- RpcRequestWrapper(RequestProto message) {
- this.message = message;
+ RpcRequestWrapper(RequestHeaderProto requestHeader, Message theRequest) {
+ this.requestHeader = requestHeader;
+ this.theRequest = theRequest;
}
@Override
public void write(DataOutput out) throws IOException {
- ((Message)message).writeDelimitedTo(
- DataOutputOutputStream.constructOutputStream(out));
+ OutputStream os = DataOutputOutputStream.constructOutputStream(out);
+
+ ((Message)requestHeader).writeDelimitedTo(os);
+ theRequest.writeDelimitedTo(os);
}
@Override
@@ -296,13 +301,16 @@ public class ProtobufRpcEngine implement
int length = ProtoUtil.readRawVarint32(in);
byte[] bytes = new byte[length];
in.readFully(bytes);
- message = RequestProto.parseFrom(bytes);
+ requestHeader = RequestHeaderProto.parseFrom(bytes);
+ length = ProtoUtil.readRawVarint32(in);
+ theRequestRead = new byte[length];
+ in.readFully(theRequestRead);
}
@Override
public String toString() {
- return message.getDeclaringClassProtocolName() + "." +
- message.getMethodName();
+ return requestHeader.getDeclaringClassProtocolName() + "." +
+ requestHeader.getMethodName();
}
}
@@ -434,7 +442,7 @@ public class ProtobufRpcEngine implement
public Writable call(RPC.Server server, String connectionProtocolName,
Writable writableRequest, long receiveTime) throws Exception {
RpcRequestWrapper request = (RpcRequestWrapper) writableRequest;
- RequestProto rpcRequest = request.message;
+ RequestHeaderProto rpcRequest = request.requestHeader;
String methodName = rpcRequest.getMethodName();
@@ -474,7 +482,8 @@ public class ProtobufRpcEngine implement
}
Message prototype = service.getRequestPrototype(methodDescriptor);
Message param = prototype.newBuilderForType()
- .mergeFrom(rpcRequest.getRequest()).build();
+ .mergeFrom(request.theRequestRead).build();
+
Message result;
try {
long startTime = Time.now();
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto?rev=1452666&r1=1452665&r2=1452666&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto Tue Mar 5 04:50:28 2013
@@ -1,4 +1,4 @@
-/**
+/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -28,20 +28,17 @@ option java_generate_equals_and_hash = t
package hadoop.common;
/**
- * This message is used for Protobuf Rpc Engine.
- * The message is used to marshal a Rpc-request
- * from RPC client to the RPC server.
+ * This message is the header for the Protobuf Rpc Engine
+ * when sending a RPC request from RPC client to the RPC server.
+ * The actual request (serialized as protobuf) follows this request.
*
* No special header is needed for the Rpc Response for Protobuf Rpc Engine.
* The normal RPC response header (see RpcHeader.proto) are sufficient.
*/
-message RequestProto {
+message RequestHeaderProto {
/** Name of the RPC method */
required string methodName = 1;
- /** Bytes corresponding to the client protobuf request */
- optional bytes request = 2;
-
/**
* RPCs for a particular interface (ie protocol) are done using a
* IPC connection that is setup using rpcProxy.
Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1452450-1452659
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java?rev=1452666&r1=1452665&r2=1452666&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java Tue Mar 5 04:50:28 2013
@@ -31,6 +31,7 @@ import java.util.Random;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Shell;
import org.junit.Test;
+import static org.junit.Assert.*;
public class TestDFVariations {
@@ -46,14 +47,8 @@ public class TestDFVariations {
}
@Override
protected String[] getExecString() {
- switch(getOSType()) {
- case OS_TYPE_AIX:
- return new String[] { "echo", "IGNORE\n", "/dev/sda3",
- "453115160", "400077240", "11%", "18", "skip%", "/foo/bar", "\n" };
- default:
- return new String[] { "echo", "IGNORE\n", "/dev/sda3",
- "453115160", "53037920", "400077240", "11%", "/foo/bar", "\n" };
- }
+ return new String[] { "echo", "IGNORE\n",
+ "/dev/sda3", "453115160", "53037920", "400077240", "11%", "/foo/bar\n"};
}
}
@@ -135,5 +130,20 @@ public class TestDFVariations {
System.out.println(e.toString());
}
}
+
+ @Test(timeout=5000)
+ public void testGetMountCurrentDirectory() throws Exception {
+ File currentDirectory = new File(".");
+ String workingDir = currentDirectory.getAbsoluteFile().getCanonicalPath();
+ DF df = new DF(new File(workingDir), 0L);
+ String mountPath = df.getMount();
+ File mountDir = new File(mountPath);
+ assertTrue("Mount dir ["+mountDir.getAbsolutePath()+"] should exist.",
+ mountDir.exists());
+ assertTrue("Mount dir ["+mountDir.getAbsolutePath()+"] should be directory.",
+ mountDir.isDirectory());
+ assertTrue("Working dir ["+workingDir+"] should start with ["+mountPath+"].",
+ workingDir.startsWith(mountPath));
+ }
}