You are viewing a plain text version of this content. The canonical link for it is here.
Posted to notifications@libcloud.apache.org by to...@apache.org on 2014/01/06 15:21:29 UTC
[1/2] git commit: Re-order docstrings.
Updated Branches:
refs/heads/trunk 11c9f00ef -> 26537d8db
Re-order docstrings.
Project: http://git-wip-us.apache.org/repos/asf/libcloud/repo
Commit: http://git-wip-us.apache.org/repos/asf/libcloud/commit/5b4e2c0b
Tree: http://git-wip-us.apache.org/repos/asf/libcloud/tree/5b4e2c0b
Diff: http://git-wip-us.apache.org/repos/asf/libcloud/diff/5b4e2c0b
Branch: refs/heads/trunk
Commit: 5b4e2c0b6128fb4c792eaf8ed0c390a538d84bf7
Parents: 11c9f00
Author: Tomaz Muraus <to...@apache.org>
Authored: Mon Jan 6 14:45:40 2014 +0100
Committer: Tomaz Muraus <to...@apache.org>
Committed: Mon Jan 6 14:46:22 2014 +0100
----------------------------------------------------------------------
libcloud/utils/files.py | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/libcloud/blob/5b4e2c0b/libcloud/utils/files.py
----------------------------------------------------------------------
diff --git a/libcloud/utils/files.py b/libcloud/utils/files.py
index 19b2a3f..4d85c8b 100644
--- a/libcloud/utils/files.py
+++ b/libcloud/utils/files.py
@@ -37,16 +37,16 @@ def read_in_chunks(iterator, chunk_size=None, fill_size=False):
"""
Return a generator which yields data in chunks.
- :type iterator: :class:`object` which implements iterator interface.
- :param response: An object which implements an iterator interface
+ :param iterator: An object which implements an iterator interface
or a File like object with read method.
+ :type iterator: :class:`object` which implements iterator interface.
- :type chunk_size: ``int``
:param chunk_size: Optional chunk size (defaults to CHUNK_SIZE)
+ :type chunk_size: ``int``
+ :param fill_size: If True, make sure chunks are exactly chunk_size in
+ length (except for last chunk).
:type fill_size: ``bool``
- :param fill_size: If True, make sure chunks are chunk_size in length
- (except for last chunk).
TODO: At some point in the future we could use byte arrays here if version
>= Python 3. This should speed things up a bit and reduce memory usage.
[2/2] git commit: Fix an exception which would get thrown if user
tries to upload an empty object via multipart upload in the S3 driver.
Posted by to...@apache.org.
Fix an exception which would get thrown if user tries to upload an
empty object via multipart upload in the S3 driver.
Part of LIBCLOUD-490.
Project: http://git-wip-us.apache.org/repos/asf/libcloud/repo
Commit: http://git-wip-us.apache.org/repos/asf/libcloud/commit/26537d8d
Tree: http://git-wip-us.apache.org/repos/asf/libcloud/tree/26537d8d
Diff: http://git-wip-us.apache.org/repos/asf/libcloud/diff/26537d8d
Branch: refs/heads/trunk
Commit: 26537d8db34ca7e52246ea0d6ef26b99aa99c5b7
Parents: 5b4e2c0
Author: Tomaz Muraus <to...@apache.org>
Authored: Mon Jan 6 15:06:24 2014 +0100
Committer: Tomaz Muraus <to...@apache.org>
Committed: Mon Jan 6 15:20:50 2014 +0100
----------------------------------------------------------------------
libcloud/storage/drivers/s3.py | 2 +-
libcloud/test/storage/test_s3.py | 26 ++++++++++++++++++++++++--
libcloud/test/test_utils.py | 24 ++++++++++++++++++++++++
libcloud/utils/files.py | 10 +++++++++-
4 files changed, 58 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/libcloud/blob/26537d8d/libcloud/storage/drivers/s3.py
----------------------------------------------------------------------
diff --git a/libcloud/storage/drivers/s3.py b/libcloud/storage/drivers/s3.py
index dee2887..947b1d7 100644
--- a/libcloud/storage/drivers/s3.py
+++ b/libcloud/storage/drivers/s3.py
@@ -522,7 +522,7 @@ class S3StorageDriver(StorageDriver):
# Read the input data in chunk sizes suitable for AWS
for data in read_in_chunks(iterator, chunk_size=CHUNK_SIZE,
- fill_size=True):
+ fill_size=True, yield_empty=True):
bytes_transferred += len(data)
if calculate_hash:
http://git-wip-us.apache.org/repos/asf/libcloud/blob/26537d8d/libcloud/test/storage/test_s3.py
----------------------------------------------------------------------
diff --git a/libcloud/test/storage/test_s3.py b/libcloud/test/storage/test_s3.py
index 4bab644..b7e24ad 100644
--- a/libcloud/test/storage/test_s3.py
+++ b/libcloud/test/storage/test_s3.py
@@ -244,6 +244,9 @@ class S3MockHttp(StorageMockHttp, MockHttpTestCase):
self.assertEqual(part_no, str(count))
self.assertEqual(etag, headers['etag'])
+ # Make sure that manifest contains at least one part
+ self.assertTrue(count >= 1)
+
body = self.fixtures.load('complete_multipart.xml')
return (httplib.OK,
body,
@@ -747,8 +750,28 @@ class S3Tests(unittest.TestCase):
self.assertTrue('some-value' in obj.meta_data)
self.driver_type._upload_file = old_func
- def test_upload_small_object_via_stream(self):
+ def test_upload_empty_object_via_stream(self):
+ if self.driver.supports_s3_multipart_upload:
+ self.mock_raw_response_klass.type = 'MULTIPART'
+ self.mock_response_klass.type = 'MULTIPART'
+ else:
+ self.mock_raw_response_klass.type = None
+ self.mock_response_klass.type = None
+ container = Container(name='foo_bar_container', extra={},
+ driver=self.driver)
+ object_name = 'foo_test_stream_data'
+ iterator = DummyIterator(data=[''])
+ extra = {'content_type': 'text/plain'}
+ obj = self.driver.upload_object_via_stream(container=container,
+ object_name=object_name,
+ iterator=iterator,
+ extra=extra)
+
+ self.assertEqual(obj.name, object_name)
+ self.assertEqual(obj.size, 0)
+
+ def test_upload_small_object_via_stream(self):
if self.driver.supports_s3_multipart_upload:
self.mock_raw_response_klass.type = 'MULTIPART'
self.mock_response_klass.type = 'MULTIPART'
@@ -770,7 +793,6 @@ class S3Tests(unittest.TestCase):
self.assertEqual(obj.size, 3)
def test_upload_big_object_via_stream(self):
-
if self.driver.supports_s3_multipart_upload:
self.mock_raw_response_klass.type = 'MULTIPART'
self.mock_response_klass.type = 'MULTIPART'
http://git-wip-us.apache.org/repos/asf/libcloud/blob/26537d8d/libcloud/test/test_utils.py
----------------------------------------------------------------------
diff --git a/libcloud/test/test_utils.py b/libcloud/test/test_utils.py
index cd8950b..1d900d4 100644
--- a/libcloud/test/test_utils.py
+++ b/libcloud/test/test_utils.py
@@ -40,6 +40,7 @@ from libcloud.utils.misc import get_secure_random_string
from libcloud.utils.networking import is_public_subnet
from libcloud.utils.networking import is_private_subnet
from libcloud.utils.networking import is_valid_ip_address
+from libcloud.storage.drivers.dummy import DummyIterator
WARNINGS_BUFFER = []
@@ -148,6 +149,29 @@ class TestUtils(unittest.TestCase):
libcloud.utils.in_development_warning('test_module')
self.assertEqual(len(WARNINGS_BUFFER), 1)
+ def test_read_in_chunks_iterator_no_data(self):
+ iterator = DummyIterator()
+ generator1 = libcloud.utils.files.read_in_chunks(iterator=iterator,
+ yield_empty=False)
+ generator2 = libcloud.utils.files.read_in_chunks(iterator=iterator,
+ yield_empty=True)
+
+ # yield_empty=False
+ count = 0
+ for data in generator1:
+ count += 1
+ self.assertEqual(data, b(''))
+
+ self.assertEqual(count, 0)
+
+ # yield_empty=True
+ count = 0
+ for data in generator2:
+ count += 1
+ self.assertEqual(data, b(''))
+
+ self.assertEqual(count, 1)
+
def test_read_in_chunks_iterator(self):
def iterator():
for x in range(0, 1000):
http://git-wip-us.apache.org/repos/asf/libcloud/blob/26537d8d/libcloud/utils/files.py
----------------------------------------------------------------------
diff --git a/libcloud/utils/files.py b/libcloud/utils/files.py
index 4d85c8b..a71e1c4 100644
--- a/libcloud/utils/files.py
+++ b/libcloud/utils/files.py
@@ -33,7 +33,8 @@ __all__ = [
]
-def read_in_chunks(iterator, chunk_size=None, fill_size=False):
+def read_in_chunks(iterator, chunk_size=None, fill_size=False,
+ yield_empty=False):
"""
Return a generator which yields data in chunks.
@@ -48,6 +49,10 @@ def read_in_chunks(iterator, chunk_size=None, fill_size=False):
length (except for last chunk).
:type fill_size: ``bool``
+ :param yield_empty: If True and the iterator returned no data, yield an
+ empty bytes object before raising StopIteration.
+ :type yield_empty: ``bool``
+
TODO: At some point in the future we could use byte arrays here if version
>= Python 3. This should speed things up a bit and reduce memory usage.
"""
@@ -75,6 +80,9 @@ def read_in_chunks(iterator, chunk_size=None, fill_size=False):
empty = True
if len(data) == 0:
+ if empty and yield_empty:
+ yield b('')
+
raise StopIteration
if fill_size: