You are viewing a plain text version of this content. The canonical link for it is here.
Posted to notifications@libcloud.apache.org by to...@apache.org on 2011/11/01 23:44:30 UTC
svn commit: r1196341 - in /libcloud/trunk: libcloud/storage/drivers/s3.py
test/storage/test_s3.py
Author: tomaz
Date: Tue Nov 1 22:44:30 2011
New Revision: 1196341
URL: http://svn.apache.org/viewvc?rev=1196341&view=rev
Log:
Implement upload_object_via_stream in S3 storage driver. Because S3 doesn't
support chunked transfer encoding, the whole object is buffered in memory when
using this method.
Modified:
libcloud/trunk/libcloud/storage/drivers/s3.py
libcloud/trunk/test/storage/test_s3.py
Modified: libcloud/trunk/libcloud/storage/drivers/s3.py
URL: http://svn.apache.org/viewvc/libcloud/trunk/libcloud/storage/drivers/s3.py?rev=1196341&r1=1196340&r2=1196341&view=diff
==============================================================================
--- libcloud/trunk/libcloud/storage/drivers/s3.py (original)
+++ libcloud/trunk/libcloud/storage/drivers/s3.py Tue Nov 1 22:44:30 2011
@@ -161,6 +161,7 @@ class S3StorageDriver(StorageDriver):
name = 'Amazon S3 (standard)'
connectionCls = S3Connection
hash_type = 'md5'
+ supports_chunked_encoding = False
ex_location_name = ''
namespace = NAMESPACE
@@ -301,11 +302,17 @@ class S3StorageDriver(StorageDriver):
def upload_object_via_stream(self, iterator, container, object_name,
extra=None, ex_storage_class=None):
- # Amazon S3 does not support chunked transfer encoding.
- # Using multipart upload to "emulate" it would mean unnecessary
- # buffering of data in memory.
- raise NotImplementedError(
- 'upload_object_via_stream not implemented for this driver')
+ # Amazon S3 does not support chunked transfer encoding so the whole data
+ # is read into memory before uploading the object.
+ upload_func = self._upload_data
+ upload_func_kwargs = {}
+
+ return self._put_object(container=container, object_name=object_name,
+ upload_func=upload_func,
+ upload_func_kwargs=upload_func_kwargs,
+ extra=extra, iterator=iterator,
+ verify_hash=False,
+ storage_class=ex_storage_class)
def delete_object(self, obj):
object_name = self._clean_object_name(name=obj.name)
Modified: libcloud/trunk/test/storage/test_s3.py
URL: http://svn.apache.org/viewvc/libcloud/trunk/test/storage/test_s3.py?rev=1196341&r1=1196340&r2=1196341&view=diff
==============================================================================
--- libcloud/trunk/test/storage/test_s3.py (original)
+++ libcloud/trunk/test/storage/test_s3.py Tue Nov 1 22:44:30 2011
@@ -225,6 +225,15 @@ class S3MockRawResponse(MockRawResponse)
headers,
httplib.responses[httplib.OK])
+ def _foo_bar_container_foo_test_stream_data(self, method, url, body, headers):
+ # test_upload_object_via_stream
+ body = ''
+ headers = { 'etag': '"0cc175b9c0f1b6a831c399e269772661"'}
+ return (httplib.OK,
+ body,
+ headers,
+ httplib.responses[httplib.OK])
+
class S3Tests(unittest.TestCase):
driver_type = S3StorageDriver
@@ -563,17 +572,18 @@ class S3Tests(unittest.TestCase):
S3StorageDriver._upload_file = old_func
def test_upload_object_via_stream(self):
- try:
- container = Container(name='foo_bar_container', extra={}, driver=self)
- object_name = 'foo_test_stream_data'
- iterator = DummyIterator(data=['2', '3', '5'])
- self.driver.upload_object_via_stream(container=container,
- object_name=object_name,
- iterator=iterator)
- except NotImplementedError:
- pass
- else:
- self.fail('Exception was not thrown')
+ container = Container(name='foo_bar_container', extra={},
+ driver=self.driver)
+ object_name = 'foo_test_stream_data'
+ iterator = DummyIterator(data=['2', '3', '5'])
+ extra = {'content_type': 'text/plain'}
+ obj = self.driver.upload_object_via_stream(container=container,
+ object_name=object_name,
+ iterator=iterator,
+ extra=extra)
+
+ self.assertEqual(obj.name, object_name)
+ self.assertEqual(obj.size, 3)
def test_delete_object_not_found(self):
self.mock_response_klass.type = 'NOT_FOUND'