Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/10/26 22:29:03 UTC

svn commit: r468117 - in /lucene/hadoop/trunk: CHANGES.txt src/java/org/apache/hadoop/ipc/Client.java src/java/org/apache/hadoop/ipc/Server.java

Author: cutting
Date: Thu Oct 26 13:29:02 2006
New Revision: 468117

URL: http://svn.apache.org/viewvc?view=rev&rev=468117
Log:
HADOOP-642.  Change IPC client to specify an explicit connect timeout.  Contributed by Konstantin.

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=468117&r1=468116&r2=468117
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Thu Oct 26 13:29:02 2006
@@ -66,6 +66,9 @@
     a re-discovered DataNode, so that no-longer-needed replications
     are stopped promptly.  (Konstantin Shvachko via cutting)
 
+18. HADOOP-642.  Change IPC client to specify an explicit connect
+    timeout.  (Konstantin Shvachko via cutting)
+
 
 Release 0.7.2 - 2006-10-18
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java?view=diff&rev=468117&r1=468116&r2=468117
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java Thu Oct 26 13:29:02 2006
@@ -32,16 +32,14 @@
 
 import java.util.Hashtable;
 import java.util.Iterator;
-import java.util.Collection;
-import java.util.Random;
 
 import org.apache.commons.logging.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.dfs.FSConstants;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.io.UTF8;
 import org.apache.hadoop.io.DataOutputBuffer;
 
 /** A client for an IPC service.  IPC calls take a single {@link Writable} as a
@@ -134,7 +132,8 @@
       short failures = 0;
       while (true) {
         try {
-          this.socket = new Socket(address.getAddress(), address.getPort());
+          this.socket = new Socket();
+          this.socket.connect(address, FSConstants.READ_TIMEOUT);
           break;
         } catch (IOException ie) { //SocketTimeoutException is also caught 
           if (failures == maxRetries) {

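For context, the hunk above replaces the connecting Socket constructor with an unconnected Socket followed by connect(SocketAddress, int), so a dead or unreachable server fails fast with a SocketTimeoutException instead of blocking on the platform's default connect timeout. Below is a minimal standalone sketch of the same pattern; the host, port, and timeout value are placeholders for illustration only (the patch itself uses FSConstants.READ_TIMEOUT):

    import java.io.IOException;
    import java.net.InetSocketAddress;
    import java.net.Socket;
    import java.net.SocketTimeoutException;

    public class ConnectTimeoutSketch {
      // Illustrative value only; the actual patch passes FSConstants.READ_TIMEOUT.
      private static final int CONNECT_TIMEOUT_MS = 60 * 1000;

      public static void main(String[] args) {
        // Placeholder address; an IPC client would use the server's address.
        InetSocketAddress address = new InetSocketAddress("localhost", 9000);

        // Old pattern: the Socket(InetAddress, int) constructor connects
        // immediately and may block for the OS default connect timeout.
        //   Socket socket = new Socket(address.getAddress(), address.getPort());

        // New pattern: create an unconnected socket, then connect with an
        // explicit timeout so the attempt is bounded.
        Socket socket = new Socket();
        try {
          socket.connect(address, CONNECT_TIMEOUT_MS);
          System.out.println("connected to " + address);
        } catch (SocketTimeoutException ste) {
          System.err.println("connect timed out after " + CONNECT_TIMEOUT_MS + " ms");
        } catch (IOException ioe) {
          System.err.println("connect failed: " + ioe);
        } finally {
          try { socket.close(); } catch (IOException ignored) {}
        }
      }
    }

In the real Client.java code, a SocketTimeoutException is an IOException, so it falls into the existing catch block and counts against maxRetries like any other connection failure.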
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java?view=diff&rev=468117&r1=468116&r2=468117
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java Thu Oct 26 13:29:02 2006
@@ -295,9 +295,9 @@
         numConnections++;
       }
       if (LOG.isDebugEnabled())
-        LOG.debug("Server connection on port " + port + " from " + 
-                c.getHostAddress() +
-                ": starting. Number of active connections: " + numConnections);
+        LOG.debug("Server connection from " + c.toString() +
+                "; # active connections: " + numConnections +
+                "; # queued calls: " + callQueue.size() );
     }
 
     void doRead(SelectionKey key) {