Posted to commits@avro.apache.org by ha...@apache.org on 2010/03/19 10:00:27 UTC

svn commit: r925147 - in /hadoop/avro/trunk: CHANGES.txt lang/py/src/avro/ipc.py lang/py/src/avro/tool.py

Author: hammer
Date: Fri Mar 19 09:00:27 2010
New Revision: 925147

URL: http://svn.apache.org/viewvc?rev=925147&view=rev
Log:
AVRO-423. HTTPTransceiver does not reuse connections
(Eric Evans via hammer)


Modified:
    hadoop/avro/trunk/CHANGES.txt
    hadoop/avro/trunk/lang/py/src/avro/ipc.py
    hadoop/avro/trunk/lang/py/src/avro/tool.py

Modified: hadoop/avro/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/CHANGES.txt?rev=925147&r1=925146&r2=925147&view=diff
==============================================================================
--- hadoop/avro/trunk/CHANGES.txt (original)
+++ hadoop/avro/trunk/CHANGES.txt Fri Mar 19 09:00:27 2010
@@ -17,6 +17,9 @@ Avro 1.3.2 (unreleased)
     AVRO-451. Try to use hashlib in Python implementation and fall
     back to md5 if we can't find it (Bruce Mitchener via hammer)
 
+    AVRO-423. HTTPTransceiver does not reuse connections
+    (Eric Evans via hammer)
+
   BUG FIXES
 
     AVRO-479. Fix 'sign' target in top-level build.sh to generate md5

Modified: hadoop/avro/trunk/lang/py/src/avro/ipc.py
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/lang/py/src/avro/ipc.py?rev=925147&r1=925146&r2=925147&view=diff
==============================================================================
--- hadoop/avro/trunk/lang/py/src/avro/ipc.py (original)
+++ hadoop/avro/trunk/lang/py/src/avro/ipc.py Fri Mar 19 09:00:27 2010
@@ -453,8 +453,9 @@ class HTTPTransceiver(object):
   A simple HTTP-based transceiver implementation.
   Useful for clients but not for servers
   """
-  def __init__(self, conn):
-    self.conn = conn
+  def __init__(self, host, port):
+    self.conn = httplib.HTTPConnection(host, port)
+    self.conn.connect()
 
   # read-only properties
   sock = property(lambda self: self.conn.sock)
@@ -466,16 +467,16 @@ class HTTPTransceiver(object):
   conn = property(lambda self: self._conn, set_conn)
 
   def transceive(self, request):
-    self.conn.close()
-    self.conn = httplib.HTTPConnection(self.conn.host, self.conn.port)
-    conn_success = self.conn.connect()
     self.write_framed_message(request)
     result = self.read_framed_message()
     return result
 
   def read_framed_message(self):
-    response_reader = FramedReader(self.conn.getresponse())
-    return response_reader.read_framed_message()
+    response = self.conn.getresponse()
+    response_reader = FramedReader(response)
+    framed_message = response_reader.read_framed_message()
+    response.read()    # ensure we're ready for subsequent requests
+    return framed_message
 
   def write_framed_message(self, message):
     req_method = 'POST'

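With this change the transceiver owns its httplib connection: it is opened once in the
constructor and reused for every request, instead of being closed and re-established at
the start of each transceive() call. The trailing response.read() in read_framed_message()
drains any unread body so that httplib considers the response complete and keeps the
connection available for the next request. A minimal usage sketch, not part of this commit,
under stated assumptions (Python 2; an Avro-over-HTTP responder listening on localhost:8080;
the framed request bytes are a placeholder):

    from avro import ipc

    # Assumption: a server is already listening on localhost:8080.
    # The constructor now opens one httplib.HTTPConnection and keeps it open;
    # every call below reuses that single connection.
    client = ipc.HTTPTransceiver('localhost', 8080)

    # Each transceive() POSTs a framed request and reads the framed response
    # over the same underlying socket; nothing is reconnected in between.
    # reply = client.transceive(framed_request_bytes)  # framed_request_bytes is a placeholder
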
Modified: hadoop/avro/trunk/lang/py/src/avro/tool.py
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/lang/py/src/avro/tool.py?rev=925147&r1=925146&r2=925147&view=diff
==============================================================================
--- hadoop/avro/trunk/lang/py/src/avro/tool.py (original)
+++ hadoop/avro/trunk/lang/py/src/avro/tool.py Fri Mar 19 09:00:27 2010
@@ -91,9 +91,7 @@ def run_server(uri, proto, msg, datum):
 
 def send_message(uri, proto, msg, datum):
   url_obj = urlparse.urlparse(uri)
-  conn = httplib.HTTPConnection(url_obj.hostname, url_obj.port)
-  conn.connect()
-  client = ipc.HTTPTransceiver(conn)
+  client = ipc.HTTPTransceiver(url_obj.hostname, url_obj.port)
   proto_json = file(proto, 'r').read()
   requestor = ipc.Requestor(protocol.parse(proto_json), client)
   print requestor.request(msg, datum)
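
The tool.py change above shows the caller-side effect: client code no longer builds and
connects its own httplib.HTTPConnection before handing it to the transceiver. A hedged
before/after sketch of that calling pattern (hostname and port are placeholders):

    from avro import ipc

    # Before this commit: the caller managed the connection.
    #   conn = httplib.HTTPConnection('localhost', 8080)
    #   conn.connect()
    #   client = ipc.HTTPTransceiver(conn)

    # After this commit: the transceiver creates, connects, and reuses it.
    client = ipc.HTTPTransceiver('localhost', 8080)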