Posted to common-commits@hadoop.apache.org by sr...@apache.org on 2013/03/08 22:47:57 UTC

svn commit: r1454593 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ dev-support/ src/main/java/org/apache/hadoop/ipc/ src/main/proto/

Author: sradia
Date: Fri Mar  8 21:47:57 2013
New Revision: 1454593

URL: http://svn.apache.org/r1454593
Log:
HADOOP-9151 Include RPC error info in RpcResponseHeader instead of sending it separately (sanjay Radia)

Modified:
    hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
    hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1454593&r1=1454592&r2=1454593&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Fri Mar  8 21:47:57 2013
@@ -8,6 +8,12 @@ Trunk (Unreleased)
     FSDataOutputStream.sync() and Syncable.sync().  (szetszwo)
 
     HADOOP-8886. Remove KFS support. (eli)
+    
+    HADOOP-9163 The rpc msg in ProtobufRpcEngine.proto should be moved out to
+    avoid an extra copy (Sanjay Radia)
+
+    HADOOP-9151 Include RPC error info in RpcResponseHeader instead of sending
+    it separately (sanjay Radia)
 
   NEW FEATURES
     
@@ -157,8 +163,6 @@ Trunk (Unreleased)
     HADOOP-9112. test-patch should -1 for @Tests without a timeout 
     (Surenkumar Nihalani via bobby)
 
-    HADOOP-9163 The rpc msg in ProtobufRpcEngine.proto should be moved out to
-    avoid an extra copy (Sanjay Radia)
 
   BUG FIXES
 

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1454593&r1=1454592&r2=1454593&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml Fri Mar  8 21:47:57 2013
@@ -308,4 +308,11 @@
        <Method name="removeRenewAction" />
        <Bug pattern="BC_UNCONFIRMED_CAST" />
      </Match>
+     
+     <!-- Inconsistent synchronization flagged by findbugs is not valid. -->
+     <Match>
+       <Class name="org.apache.hadoop.ipc.Client$Connection" />
+       <Field name="in" />
+       <Bug pattern="IS2_INCONSISTENT_SYNC" />
+     </Match>
  </FindBugsFilter>

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1454593&r1=1454592&r2=1454593&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Fri Mar  8 21:47:57 2013
@@ -59,7 +59,6 @@ import org.apache.hadoop.fs.CommonConfig
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
@@ -945,31 +944,38 @@ public class Client {
       touch();
       
       try {
-        RpcResponseHeaderProto response = 
+        RpcResponseHeaderProto header = 
             RpcResponseHeaderProto.parseDelimitedFrom(in);
-        if (response == null) {
+        if (header == null) {
           throw new IOException("Response is null.");
         }
 
-        int callId = response.getCallId();
+        int callId = header.getCallId();
         if (LOG.isDebugEnabled())
           LOG.debug(getName() + " got value #" + callId);
 
         Call call = calls.get(callId);
-        RpcStatusProto status = response.getStatus();
+        RpcStatusProto status = header.getStatus();
         if (status == RpcStatusProto.SUCCESS) {
           Writable value = ReflectionUtils.newInstance(valueClass, conf);
           value.readFields(in);                 // read value
           call.setRpcResponse(value);
           calls.remove(callId);
-        } else if (status == RpcStatusProto.ERROR) {
-          call.setException(new RemoteException(WritableUtils.readString(in),
-                                                WritableUtils.readString(in)));
-          calls.remove(callId);
-        } else if (status == RpcStatusProto.FATAL) {
-          // Close the connection
-          markClosed(new RemoteException(WritableUtils.readString(in), 
-                                         WritableUtils.readString(in)));
+        } else { // Rpc Request failed
+            final String exceptionClassName = header.hasExceptionClassName() ?
+                header.getExceptionClassName() : 
+                  "ServerDidNotSetExceptionClassName";
+            final String errorMsg = header.hasErrorMsg() ? 
+                header.getErrorMsg() : "ServerDidNotSetErrorMsg" ;
+          RemoteException re = 
+              new RemoteException(exceptionClassName, errorMsg);
+          if (status == RpcStatusProto.ERROR) {
+            call.setException(re);
+            calls.remove(callId);
+          } else if (status == RpcStatusProto.FATAL) {
+            // Close the connection
+            markClosed(re);
+          }
         }
       } catch (IOException e) {
         markClosed(e);
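
For readers following the receive path: after this change the failure branch reads nothing extra off the wire; both strings come out of the header, and since the new fields are optional the getters are guarded. A minimal sketch of that branch, using only identifiers that appear in the hunk above (not the exact committed code):

    // Turn a failed response header into the RemoteException the client
    // surfaces.  Both fields are optional in the proto, so has*() is checked
    // first and placeholder strings are used if the server omitted them.
    static RemoteException toRemoteException(RpcResponseHeaderProto header) {
      final String exceptionClassName = header.hasExceptionClassName()
          ? header.getExceptionClassName()
          : "ServerDidNotSetExceptionClassName";
      final String errorMsg = header.hasErrorMsg()
          ? header.getErrorMsg()
          : "ServerDidNotSetErrorMsg";
      return new RemoteException(exceptionClassName, errorMsg);
    }
    // RpcStatusProto.ERROR fails only the affected call (setException + remove);
    // RpcStatusProto.FATAL also marks the whole connection closed.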

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1454593&r1=1454592&r2=1454593&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Fri Mar  8 21:47:57 2013
@@ -2001,6 +2001,7 @@ public abstract class Server {
         RpcResponseHeaderProto.newBuilder();
     response.setCallId(call.callId);
     response.setStatus(status);
+    response.setServerIpcVersionNum(Server.CURRENT_VERSION);
 
 
     if (status == RpcStatusProto.SUCCESS) {
@@ -2017,13 +2018,10 @@ public abstract class Server {
             StringUtils.stringifyException(t));
         return;
       }
-    } else {
-      if (status == RpcStatusProto.FATAL) {
-        response.setServerIpcVersionNum(Server.CURRENT_VERSION);
-      }
+    } else { // Rpc Failure
+      response.setExceptionClassName(errorClass);
+      response.setErrorMsg(error);
       response.build().writeDelimitedTo(out);
-      WritableUtils.writeString(out, errorClass);
-      WritableUtils.writeString(out, error);
     }
     if (call.connection.useWrap) {
       wrapWithSasl(responseBuf, call);
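
On the send side, the header is now the single carrier for error details, and the server IPC version number is stamped on every response rather than only on FATAL ones. A rough sketch of what gets written (setters taken from the updated RpcResponseHeaderProto; illustrative, not the committed method):

    RpcResponseHeaderProto.Builder response = RpcResponseHeaderProto.newBuilder();
    response.setCallId(call.callId);
    response.setStatus(status);
    response.setServerIpcVersionNum(Server.CURRENT_VERSION);  // success and failure
    if (status != RpcStatusProto.SUCCESS) {        // ERROR or FATAL
      response.setExceptionClassName(errorClass);  // replaces WritableUtils.writeString
      response.setErrorMsg(error);                 // replaces WritableUtils.writeString
    }
    response.build().writeDelimitedTo(out);
    // On SUCCESS the serialized rpc response bytes follow the header;
    // on ERROR/FATAL nothing else is written after it.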

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto?rev=1454593&r1=1454592&r2=1454593&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto Fri Mar  8 21:47:57 2013
@@ -70,12 +70,11 @@ message RpcRequestHeaderProto { // the h
  * | RpcResponseHeaderProto - serialized delimited ie has len         |
  * +------------------------------------------------------------------+
  * | if request is successful:                                        |
- * |   - RpcResponse -  The actual rpc response  bytes                 |
- * |     This response is serialized based on RpcKindProto             |
+ * |   - RpcResponse -  The actual rpc response  bytes follow         |
+ * |     the response header                                          |
+ * |     This response is serialized based on RpcKindProto            |
  * | if request fails :                                               |
- * |   - length (4 byte int) + Class name of exception - UTF-8 string |
- * |   - length (4 byte int) + Stacktrace - UTF-8 string              |
- * |   if the strings are null then the length is -1                  |
+ * |   The rpc response header contains the necessary info            |
  * +------------------------------------------------------------------+
  *
  */
@@ -88,5 +87,7 @@ message RpcResponseHeaderProto {
 
   required uint32 callId = 1; // callId used in Request
   required RpcStatusProto status = 2;
-  optional uint32 serverIpcVersionNum = 3; // in case of an fatal IPC error 
+  optional uint32 serverIpcVersionNum = 3; // Sent if success or fail
+  optional string exceptionClassName = 4;  // if request fails
+  optional string errorMsg = 5;  // if request fails, often contains stack trace
 }
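
Because exceptionClassName and errorMsg are optional, existing readers simply ignore them and new readers check presence before use; the delimited framing described in the comment block is unchanged. A hypothetical round trip through the updated header (values invented for illustration; the generated RpcResponseHeaderProto/RpcStatusProto classes and the java.io stream classes are assumed to be imported):

    public static void main(String[] args) throws Exception {
      // Write a failure header the way the server now does ...
      ByteArrayOutputStream buf = new ByteArrayOutputStream();
      RpcResponseHeaderProto.newBuilder()
          .setCallId(42)
          .setStatus(RpcStatusProto.ERROR)
          .setServerIpcVersionNum(Server.CURRENT_VERSION)
          .setExceptionClassName("org.apache.hadoop.security.AccessControlException")
          .setErrorMsg("Permission denied")
          .build()
          .writeDelimitedTo(buf);                  // varint length prefix + bytes

      // ... then read it back the way the client does.
      RpcResponseHeaderProto header = RpcResponseHeaderProto.parseDelimitedFrom(
          new ByteArrayInputStream(buf.toByteArray()));
      assert header.hasExceptionClassName();       // present only on failure
      assert "Permission denied".equals(header.getErrorMsg());
    }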