Posted to commits@mesos.apache.org by be...@apache.org on 2011/06/05 10:37:04 UTC

svn commit: r1132068 [6/8] - in /incubator/mesos/trunk: ec2/ third_party/boto-1.9b/ third_party/boto-1.9b/bin/ third_party/boto-1.9b/boto.egg-info/ third_party/boto-1.9b/boto/ third_party/boto-1.9b/boto/cloudfront/ third_party/boto-1.9b/boto/contrib/ t...

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/connection.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/s3/connection.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/connection.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/connection.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/s3/connection.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
--- incubator/mesos/trunk/third_party/boto-1.9b/boto/s3/connection.py (original)
+++ incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/connection.py Sun Jun  5 08:36:52 2011
@@ -1,4 +1,6 @@
-# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2010, Eucalyptus Systems, Inc.
+# All rights reserved.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a
 # copy of this software and associated documentation files (the
@@ -23,7 +25,6 @@ import xml.sax
 import urllib, base64
 import time
 import boto.utils
-import types
 from boto.connection import AWSAuthConnection
 from boto import handler
 from boto.s3.bucket import Bucket
@@ -31,20 +32,43 @@ from boto.s3.key import Key
 from boto.resultset import ResultSet
 from boto.exception import S3ResponseError, S3CreateError, BotoClientError
 
+def check_lowercase_bucketname(n):
+    """
+    Bucket names must not contain uppercase characters. We check for
+    this by appending a lowercase character and testing with islower().
+    Note this also covers cases like numeric bucket names with dashes.
+        
+    >>> check_lowercase_bucketname("Aaaa")
+    Traceback (most recent call last):
+    ...
+    BotoClientError: S3Error: Bucket names cannot contain upper-case
+    characters when using either the sub-domain or virtual hosting calling
+    format.
+    
+    >>> check_lowercase_bucketname("1234-5678-9123")
+    True
+    >>> check_lowercase_bucketname("abcdefg1234")
+    True
+    """
+    if not (n + 'a').islower():
+        raise BotoClientError("Bucket names cannot contain upper-case " \
+            "characters when using either the sub-domain or virtual " \
+            "hosting calling format.")
+    return True
+
 def assert_case_insensitive(f):
     def wrapper(*args, **kwargs):
-        if len(args) == 3 and not (args[2].islower() or args[2].isalnum()):
-            raise BotoClientError("Bucket names cannot contain upper-case " \
-            "characters when using either the sub-domain or virtual " \
-        "hosting calling format.")
+        if len(args) == 3 and check_lowercase_bucketname(args[2]):
+            pass
         return f(*args, **kwargs)
     return wrapper
 
 class _CallingFormat:
-    def build_url_base(self, protocol, server, bucket, key=''):
+
+    def build_url_base(self, connection, protocol, server, bucket, key=''):
         url_base = '%s://' % protocol
         url_base += self.build_host(server, bucket)
-        url_base += self.build_path_base(bucket, key)
+        url_base += connection.get_path(self.build_path_base(bucket, key))
         return url_base
 
     def build_host(self, server, bucket):
@@ -63,16 +87,19 @@ class _CallingFormat:
         return '/%s' % urllib.quote(key)
 
 class SubdomainCallingFormat(_CallingFormat):
+
     @assert_case_insensitive
     def get_bucket_server(self, server, bucket):
         return '%s.%s' % (bucket, server)
 
 class VHostCallingFormat(_CallingFormat):
+
     @assert_case_insensitive
     def get_bucket_server(self, server, bucket):
         return bucket
 
 class OrdinaryCallingFormat(_CallingFormat):
+    
     def get_bucket_server(self, server, bucket):
         return server
 
@@ -85,6 +112,9 @@ class OrdinaryCallingFormat(_CallingForm
 class Location:
     DEFAULT = ''
     EU = 'EU'
+    USWest = 'us-west-1'
+
+#boto.set_stream_logger('s3')
 
 class S3Connection(AWSAuthConnection):
 
@@ -95,26 +125,40 @@ class S3Connection(AWSAuthConnection):
                  is_secure=True, port=None, proxy=None, proxy_port=None,
                  proxy_user=None, proxy_pass=None,
                  host=DefaultHost, debug=0, https_connection_factory=None,
-                 calling_format=SubdomainCallingFormat(), path='/'):
+                 calling_format=SubdomainCallingFormat(), path='/', provider='aws',
+                 bucket_class=Bucket):
         self.calling_format = calling_format
+        self.bucket_class = bucket_class
         AWSAuthConnection.__init__(self, host,
                 aws_access_key_id, aws_secret_access_key,
                 is_secure, port, proxy, proxy_port, proxy_user, proxy_pass,
                 debug=debug, https_connection_factory=https_connection_factory,
-                path=path)
+                path=path, provider=provider)
 
     def __iter__(self):
-        return self.get_all_buckets()
+        for bucket in self.get_all_buckets():
+            yield bucket
 
     def __contains__(self, bucket_name):
        return not (self.lookup(bucket_name) is None)
 
+    def set_bucket_class(self, bucket_class):
+        """
+        Set the Bucket class associated with this connection.  By default,
+        this is the boto.s3.bucket.Bucket class, but if you want to subclass
+        that for some reason this allows you to associate your new class.
+        
+        :type bucket_class: class
+        :param bucket_class: A subclass of Bucket that can be more specific
+        """
+        self.bucket_class = bucket_class
+
     def build_post_policy(self, expiration_time, conditions):
         """
         Taken from the AWS book Python examples and modified for use with boto
         """
-        if type(expiration_time) != time.struct_time:
-            raise 'Policy document must include a valid expiration Time object'
+        assert type(expiration_time) == time.struct_time, \
+            'Policy document must include a valid expiration Time object'
 
         # Convert conditions object mappings to condition statements
 
@@ -211,7 +255,7 @@ class S3Connection(AWSAuthConnection):
         fields.append({"name": "key", "value": key})
 
         # HTTPS protocol will be used if the secure HTTP option is enabled.
-        url = '%s://%s.s3.amazonaws.com/' % (http_method, bucket_name)
+        url = '%s://%s.%s/' % (http_method, bucket_name, self.host)
 
         return {"action": url, "fields": fields}
 
@@ -222,18 +266,22 @@ class S3Connection(AWSAuthConnection):
             headers = {}
         expires = int(time.time() + expires_in)
         auth_path = self.calling_format.build_auth_path(bucket, key)
+        auth_path = self.get_path(auth_path)
         canonical_str = boto.utils.canonical_string(method, auth_path,
-                                                    headers, expires)
+                                                    headers, expires,
+                                                    self.provider)
         hmac_copy = self.hmac.copy()
         hmac_copy.update(canonical_str)
         b64_hmac = base64.encodestring(hmac_copy.digest()).strip()
         encoded_canonical = urllib.quote_plus(b64_hmac)
-        path = self.calling_format.build_path_base(bucket, key)
+        self.calling_format.build_path_base(bucket, key)
         if query_auth:
             query_part = '?' + self.QueryString % (encoded_canonical, expires,
                                              self.aws_access_key_id)
-            if 'x-amz-security-token' in headers:
-                query_part += '&x-amz-security-token=%s' % urllib.quote(headers['x-amz-security-token']);
+            sec_hdr = self.provider.security_token_header
+            if sec_hdr in headers:
+                query_part += ('&%s=%s' % (sec_hdr,
+                                           urllib.quote(headers[sec_hdr])));
         else:
             query_part = ''
         if force_http:
@@ -242,15 +290,15 @@ class S3Connection(AWSAuthConnection):
         else:
             protocol = self.protocol
             port = self.port
-        return self.calling_format.build_url_base(protocol, self.server_name(port),
+        return self.calling_format.build_url_base(self, protocol, self.server_name(port),
                                                   bucket, key) + query_part
 
     def get_all_buckets(self, headers=None):
         response = self.make_request('GET')
         body = response.read()
         if response.status > 300:
-            raise S3ResponseError(response.status, response.reason, body, headers=headers)
-        rs = ResultSet([('Bucket', Bucket)])
+            raise S3ResponseError(response.status, response.reason, body)
+        rs = ResultSet([('Bucket', self.bucket_class)])
         h = handler.XmlHandler(rs, self)
         xml.sax.parseString(body, h)
         return rs
@@ -270,9 +318,9 @@ class S3Connection(AWSAuthConnection):
         return rs.ID
 
     def get_bucket(self, bucket_name, validate=True, headers=None):
-        bucket = Bucket(self, bucket_name)
+        bucket = self.bucket_class(self, bucket_name)
         if validate:
-            rs = bucket.get_all_keys(headers, maxkeys=0)
+            bucket.get_all_keys(headers, maxkeys=0)
         return bucket
 
     def lookup(self, bucket_name, validate=True, headers=None):
@@ -282,7 +330,8 @@ class S3Connection(AWSAuthConnection):
             bucket = None
         return bucket
 
-    def create_bucket(self, bucket_name, headers=None, location=Location.DEFAULT, policy=None):
+    def create_bucket(self, bucket_name, headers=None,
+                      location=Location.DEFAULT, policy=None):
         """
         Creates a new located bucket. By default it's in the USA. You can pass
         Location.EU to create an European bucket.
@@ -300,11 +349,13 @@ class S3Connection(AWSAuthConnection):
         :param policy: A canned ACL policy that will be applied to the new key in S3.
              
         """
+        check_lowercase_bucketname(bucket_name)
+
         if policy:
             if headers:
-                headers['x-amz-acl'] = policy
+                headers[self.provider.acl_header] = policy
             else:
-                headers = {'x-amz-acl' : policy}
+                headers = {self.provider.acl_header : policy}
         if location == Location.DEFAULT:
             data = ''
         else:
@@ -316,7 +367,7 @@ class S3Connection(AWSAuthConnection):
         if response.status == 409:
             raise S3CreateError(response.status, response.reason, body)
         if response.status == 200:
-            return Bucket(self, bucket_name)
+            return self.bucket_class(self, bucket_name)
         else:
             raise S3ResponseError(response.status, response.reason, body)
 
@@ -328,7 +379,7 @@ class S3Connection(AWSAuthConnection):
 
     def make_request(self, method, bucket='', key='', headers=None, data='',
             query_args=None, sender=None):
-        if isinstance(bucket, Bucket):
+        if isinstance(bucket, self.bucket_class):
             bucket = bucket.name
         if isinstance(key, Key):
             key = key.name
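
The connection.py changes above add up-front bucket-name validation, a pluggable bucket_class, a provider argument, and the Location.USWest constant. A minimal usage sketch, assuming AWS credentials are available from the environment or the boto config; the bucket name and subclass are illustrative:

    from boto.s3.connection import (S3Connection, Location,
                                    check_lowercase_bucketname)
    from boto.s3.bucket import Bucket

    class AuditedBucket(Bucket):
        """Illustrative Bucket subclass handed back by this connection."""
        pass

    # provider defaults to 'aws'; bucket_class controls what get_bucket(),
    # get_all_buckets() and create_bucket() return.
    conn = S3Connection(bucket_class=AuditedBucket)

    check_lowercase_bucketname('logs-2011')      # True
    # check_lowercase_bucketname('Logs-2011') would raise BotoClientError

    bucket = conn.create_bucket('logs-2011', location=Location.USWest)
    assert isinstance(bucket, AuditedBucket)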

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/deletemarker.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/s3/prefix.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/deletemarker.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/deletemarker.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/s3/prefix.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
--- incubator/mesos/trunk/third_party/boto-1.9b/boto/s3/prefix.py (original)
+++ incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/deletemarker.py Sun Jun  5 08:36:52 2011
@@ -1,4 +1,4 @@
-# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
 #
 # Permission is hereby granted, free of charge, to any person obtaining a
 # copy of this software and associated documentation files (the
@@ -19,17 +19,38 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 # IN THE SOFTWARE.
 
-class Prefix:
+from boto.s3.user import User
+
+class DeleteMarker:
     def __init__(self, bucket=None, name=None):
         self.bucket = bucket
         self.name = name
+        self.is_latest = False
+        self.last_modified = None
+        self.owner = None
 
     def startElement(self, name, attrs, connection):
-        return None
+        if name == 'Owner':
+            self.owner = User(self)
+            return self.owner
+        else:
+            return None
 
     def endElement(self, name, value, connection):
-        if name == 'Prefix':
-            self.name = value
+        if name == 'Key':
+            self.name = value.encode('utf-8')
+        elif name == 'IsLatest':
+            if value == 'true':
+                self.is_latest = True
+            else:
+                self.is_latest = False
+        elif name == 'LastModified':
+            self.last_modified = value
+        elif name == 'Owner':
+            pass
+        elif name == 'VersionId':
+            self.version_id = value
         else:
             setattr(self, name, value)
 
+
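
DeleteMarker objects are built from versioned-bucket listings. A small sketch that drives the class through the same SAX plumbing boto uses elsewhere in this diff; the XML fragment is hand-written to mimic an S3 ListVersions response:

    import xml.sax
    from boto import handler
    from boto.resultset import ResultSet
    from boto.s3.deletemarker import DeleteMarker

    body = """<ListVersionsResult>
      <DeleteMarker>
        <Key>photos/cat.jpg</Key>
        <VersionId>3HL4kqCxf3vjVBH40Nrjfkd</VersionId>
        <IsLatest>true</IsLatest>
        <LastModified>2011-06-05T08:36:52.000Z</LastModified>
      </DeleteMarker>
    </ListVersionsResult>"""

    rs = ResultSet([('DeleteMarker', DeleteMarker)])
    xml.sax.parseString(body, handler.XmlHandler(rs, None))
    for marker in rs:
        print marker.name, marker.is_latest, marker.last_modified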

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/key.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/s3/key.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/key.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/key.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/s3/key.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
--- incubator/mesos/trunk/third_party/boto-1.9b/boto/s3/key.py (original)
+++ incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/key.py Sun Jun  5 08:36:52 2011
@@ -27,12 +27,13 @@ import base64
 import boto.utils
 from boto.exception import S3ResponseError, S3DataError, BotoClientError
 from boto.s3.user import User
-from boto import UserAgent, config
+from boto import UserAgent
 try:
     from hashlib import md5
 except ImportError:
     from md5 import md5
 
+
 class Key(object):
 
     DefaultContentType = 'application/octet-stream'
@@ -43,19 +44,23 @@ class Key(object):
         self.bucket = bucket
         self.name = name
         self.metadata = {}
+        self.cache_control = None
         self.content_type = self.DefaultContentType
         self.content_encoding = None
         self.filename = None
         self.etag = None
         self.last_modified = None
         self.owner = None
-        self.storage_class = None
+        self.storage_class = 'STANDARD'
         self.md5 = None
         self.base64md5 = None
         self.path = None
         self.resp = None
         self.mode = None
         self.size = None
+        self.version_id = None
+        self.source_version_id = None
+        self.delete_marker = False
 
     def __repr__(self):
         if self.bucket:
@@ -78,6 +83,27 @@ class Key(object):
     def __iter__(self):
         return self
 
+    def get_md5_from_hexdigest(self, md5_hexdigest):
+        """
+        A utility function to create the 2-tuple (md5hexdigest, base64md5)
+        from just having a precalculated md5_hexdigest.
+        """
+        import binascii
+        digest = binascii.unhexlify(md5_hexdigest)
+        base64md5 = base64.encodestring(digest)
+        if base64md5[-1] == '\n':
+            base64md5 = base64md5[0:-1]
+        return (md5_hexdigest, base64md5)
+    
+    def handle_version_headers(self, resp):
+        provider = self.bucket.connection.provider
+        self.version_id = resp.getheader(provider.version_id, None)
+        self.source_version_id = resp.getheader(provider.copy_source_version_id, None)
+        if resp.getheader(provider.delete_marker, 'false') == 'true':
+            self.delete_marker = True
+        else:
+            self.delete_marker = False
+
     def open_read(self, headers=None, query_args=None):
         """
         Open this key for reading
@@ -91,11 +117,17 @@ class Key(object):
         if self.resp == None:
             self.mode = 'r'
             
-            self.resp = self.bucket.connection.make_request('GET', self.bucket.name, self.name, headers, query_args=query_args)
+            self.resp = self.bucket.connection.make_request('GET',
+                                                            self.bucket.name,
+                                                            self.name, headers,
+                                                            query_args=query_args)
             if self.resp.status < 199 or self.resp.status > 299:
-                raise S3ResponseError(self.resp.status, self.resp.reason)
+                body = self.resp.read()
+                raise S3ResponseError(self.resp.status, self.resp.reason, body)
             response_headers = self.resp.msg
-            self.metadata = boto.utils.get_aws_metadata(response_headers)
+            provider = self.bucket.connection.provider
+            self.metadata = boto.utils.get_aws_metadata(response_headers,
+                                                        provider)
             for name,value in response_headers.items():
                 if name.lower() == 'content-length':
                     self.size = int(value)
@@ -107,6 +139,9 @@ class Key(object):
                     self.content_encoding = value
                 elif name.lower() == 'last-modified':
                     self.last_modified = value
+                elif name.lower() == 'cache-control':
+                    self.cache_control = value
+            self.handle_version_headers(self.resp)
 
     def open_write(self, headers=None):
         """
@@ -162,7 +197,33 @@ class Key(object):
             self.close()
         return data
 
-    def copy(self, dst_bucket, dst_key, metadata=None):
+    def change_storage_class(self, new_storage_class, dst_bucket=None):
+        """
+        Change the storage class of an existing key.
+        Depending on whether a different destination bucket is supplied
+        or not, this will either move the item within the bucket, preserving
+        all metadata and ACL info but changing the storage class, or it
+        will copy the item to the provided destination bucket, also
+        preserving metadata and ACL info.
+
+        :type new_storage_class: string
+        :param new_storage_class: The new storage class for the Key.
+                                  Possible values are:
+                                  * STANDARD
+                                  * REDUCED_REDUNDANCY
+
+        :type dst_bucket: string
+        :param dst_bucket: The name of a destination bucket.  If not
+                           provided the current bucket of the key
+                           will be used.
+                                  
+        """
+        self.storage_class = new_storage_class
+        return self.copy(self.bucket.name, self.name,
+                         reduced_redundancy=True, preserve_acl=True)
+
+    def copy(self, dst_bucket, dst_key, metadata=None,
+             reduced_redundancy=False, preserve_acl=False):
         """
         Copy this Key to another bucket.
 
@@ -170,7 +231,7 @@ class Key(object):
         :param dst_bucket: The name of the destination bucket
 
         :type dst_key: string
-        :param dst_key: The name of the destinatino key
+        :param dst_key: The name of the destination key
         
         :type metadata: dict
         :param metadata: Metadata to be associated with new key.
@@ -179,11 +240,41 @@ class Key(object):
                          If no metadata is supplied, the source key's
                          metadata will be copied to the new key.
 
+        :type reduced_redundancy: bool
+        :param reduced_redundancy: If True, this will force the storage
+                                   class of the new Key to be
+                                   REDUCED_REDUNDANCY regardless of the
+                                   storage class of the key being copied.
+                                   The Reduced Redundancy Storage (RRS)
+                                   feature of S3 provides lower
+                                   redundancy at lower storage cost.
+
+        :type preserve_acl: bool
+        :param preserve_acl: If True, the ACL from the source key
+                             will be copied to the destination
+                             key.  If False, the destination key
+                             will have the default ACL.
+                             Note that preserving the ACL in the
+                             new key object will require two
+                             additional API calls to S3, one to
+                             retrieve the current ACL and one to
+                             set that ACL on the new object.  If
+                             you don't care about the ACL, a value
+                             of False will be significantly more
+                             efficient.
+
         :rtype: :class:`boto.s3.key.Key` or subclass
         :returns: An instance of the newly created key object
         """
         dst_bucket = self.bucket.connection.lookup(dst_bucket)
-        return dst_bucket.copy_key(dst_key, self.bucket.name, self.name, metadata)
+        if reduced_redundancy:
+            storage_class = 'REDUCED_REDUNDANCY'
+        else:
+            storage_class = self.storage_class
+        return dst_bucket.copy_key(dst_key, self.bucket.name,
+                                   self.name, metadata,
+                                   storage_class=storage_class,
+                                   preserve_acl=preserve_acl)
 
     def startElement(self, name, attrs, connection):
         if name == 'Owner':
@@ -205,6 +296,8 @@ class Key(object):
             self.storage_class = value
         elif name == 'Owner':
             pass
+        elif name == 'VersionId':
+            self.version_id = value
         else:
             setattr(self, name, value)
 
@@ -296,10 +389,13 @@ class Key(object):
                     been successfully transmitted to S3 and the second representing
                     the total number of bytes that need to be transmitted.
                     
-        :type cb: int
-        :param num_cb: (optional) If a callback is specified with the cb parameter
-             this parameter determines the granularity of the callback by defining
-             the maximum number of times the callback will be called during the file transfer.  
+        :type num_cb: int
+        :param num_cb: (optional) If a callback is specified with the cb
+                       parameter this parameter determines the granularity
+                       of the callback by defining the maximum number of
+                       times the callback will be called during the file
+                       transfer. Providing a negative integer will cause
+                       your callback to be called with each buffer read.
              
         """
         def sender(http_conn, method, path, data, headers):
@@ -313,6 +409,8 @@ class Key(object):
             if cb:
                 if num_cb > 2:
                     cb_count = self.size / self.BufferSize / (num_cb-2)
+                elif num_cb < 0:
+                    cb_count = -1
                 else:
                     cb_count = 0
                 i = total_bytes = 0
@@ -323,7 +421,7 @@ class Key(object):
                 if cb:
                     total_bytes += len(l)
                     i += 1
-                    if i == cb_count:
+                    if i == cb_count or cb_count == -1:
                         cb(total_bytes, self.size)
                         i = 0
                 l = fp.read(self.BufferSize)
@@ -351,8 +449,13 @@ class Key(object):
             headers = headers.copy()
         headers['User-Agent'] = UserAgent
         headers['Content-MD5'] = self.base64md5
+        if self.storage_class != 'STANDARD':
+            provider = self.bucket.connection.provider
+            headers[provider.storage_class_header] = self.storage_class
         if headers.has_key('Content-Type'):
             self.content_type = headers['Content-Type']
+        if headers.has_key('Content-Encoding'):
+            self.content_encoding = headers['Content-Encoding']
         elif self.path:
             self.content_type = mimetypes.guess_type(self.path)[0]
             if self.content_type == None:
@@ -362,9 +465,12 @@ class Key(object):
             headers['Content-Type'] = self.content_type
         headers['Content-Length'] = str(self.size)
         headers['Expect'] = '100-Continue'
-        headers = boto.utils.merge_meta(headers, self.metadata)
-        return self.bucket.connection.make_request('PUT', self.bucket.name,
-                self.name, headers, sender=sender)
+        headers = boto.utils.merge_meta(headers, self.metadata,
+                                        self.bucket.connection.provider)
+        resp = self.bucket.connection.make_request('PUT', self.bucket.name,
+                                                   self.name, headers,
+                                                   sender=sender)
+        self.handle_version_headers(resp)
 
     def compute_md5(self, fp):
         """
@@ -391,8 +497,9 @@ class Key(object):
         fp.seek(0)
         return (hex_md5, base64md5)
 
-    def set_contents_from_file(self, fp, headers=None, replace=True, cb=None, num_cb=10,
-                               policy=None, md5=None):
+    def set_contents_from_file(self, fp, headers=None, replace=True,
+                               cb=None, num_cb=10, policy=None, md5=None,
+                               reduced_redundancy=False):
         """
         Store an object in S3 using the name of the Key object as the
         key in S3 and the contents of the file pointed to by 'fp' as the
@@ -433,17 +540,35 @@ class Key(object):
         :param md5: If you need to compute the MD5 for any reason prior to upload,
                     it's silly to have to do it twice so this param, if present, will be
                     used as the MD5 values of the file.  Otherwise, the checksum will be computed.
+        :type reduced_redundancy: bool
+        :param reduced_redundancy: If True, this will set the storage
+                                   class of the new Key to be
+                                   REDUCED_REDUNDANCY. The Reduced Redundancy
+                                   Storage (RRS) feature of S3 provides lower
+                                   redundancy at lower storage cost.
+
         """
+        provider = self.bucket.connection.provider
+        if headers is None:
+            headers = {}
         if policy:
-            if headers:
-                headers['x-amz-acl'] = policy
-            else:
-                headers = {'x-amz-acl' : policy}
+            headers[provider.acl_header] = policy
+        if reduced_redundancy:
+            self.storage_class = 'REDUCED_REDUNDANCY'
+            if provider.storage_class_header:
+                headers[provider.storage_class_header] = self.storage_class
+                # TODO - What if the provider doesn't support reduced redundancy?
+                # What if different providers provide different classes?
         if hasattr(fp, 'name'):
             self.path = fp.name
         if self.bucket != None:
             if not md5:
                 md5 = self.compute_md5(fp)
+            else:
+                # even if md5 is provided, still need to set size of content
+                fp.seek(0, 2)
+                self.size = fp.tell()
+                fp.seek(0)
             self.md5 = md5[0]
             self.base64md5 = md5[1]
             if self.name == None:
@@ -454,8 +579,9 @@ class Key(object):
                     return
             self.send_file(fp, headers, cb, num_cb)
 
-    def set_contents_from_filename(self, filename, headers=None, replace=True, cb=None, num_cb=10,
-                                   policy=None, md5=None):
+    def set_contents_from_filename(self, filename, headers=None, replace=True,
+                                   cb=None, num_cb=10, policy=None, md5=None,
+                                   reduced_redundancy=False):
         """
         Store an object in S3 using the name of the Key object as the
         key in S3 and the contents of the file named by 'filename'.
@@ -493,13 +619,22 @@ class Key(object):
         :param md5: If you need to compute the MD5 for any reason prior to upload,
                     it's silly to have to do it twice so this param, if present, will be
                     used as the MD5 values of the file.  Otherwise, the checksum will be computed.
+                    
+        :type reduced_redundancy: bool
+        :param reduced_redundancy: If True, this will set the storage
+                                   class of the new Key to be
+                                   REDUCED_REDUNDANCY. The Reduced Redundancy
+                                   Storage (RRS) feature of S3 provides lower
+                                   redundancy at lower storage cost.
         """
         fp = open(filename, 'rb')
-        self.set_contents_from_file(fp, headers, replace, cb, num_cb, policy)
+        self.set_contents_from_file(fp, headers, replace, cb, num_cb,
+                                    policy, md5, reduced_redundancy)
         fp.close()
 
-    def set_contents_from_string(self, s, headers=None, replace=True, cb=None, num_cb=10,
-                                 policy=None, md5=None):
+    def set_contents_from_string(self, s, headers=None, replace=True,
+                                 cb=None, num_cb=10, policy=None, md5=None,
+                                 reduced_redundancy=False):
         """
         Store an object in S3 using the name of the Key object as the
         key in S3 and the string 's' as the contents.
@@ -534,12 +669,22 @@ class Key(object):
         :param md5: If you need to compute the MD5 for any reason prior to upload,
                     it's silly to have to do it twice so this param, if present, will be
                     used as the MD5 values of the file.  Otherwise, the checksum will be computed.
+                    
+        :type reduced_redundancy: bool
+        :param reduced_redundancy: If True, this will set the storage
+                                   class of the new Key to be
+                                   REDUCED_REDUNDANCY. The Reduced Redundancy
+                                   Storage (RRS) feature of S3 provides lower
+                                   redundancy at lower storage cost.
         """
         fp = StringIO.StringIO(s)
-        self.set_contents_from_file(fp, headers, replace, cb, num_cb, policy)
+        r = self.set_contents_from_file(fp, headers, replace, cb, num_cb,
+                                        policy, md5, reduced_redundancy)
         fp.close()
+        return r
 
-    def get_file(self, fp, headers=None, cb=None, num_cb=10, torrent=False):
+    def get_file(self, fp, headers=None, cb=None, num_cb=10,
+                 torrent=False, version_id=None):
         """
         Retrieves a file from an S3 Key
         
@@ -576,8 +721,12 @@ class Key(object):
         if self.bucket.connection.debug == 1:
             self.bucket.connection.debug = 0
         
-        if torrent: torrent = "torrent"
-        self.open('r', headers, query_args=torrent)
+        query_args = ''
+        if torrent:
+            query_args = 'torrent'
+        elif version_id:
+            query_args = 'versionId=%s' % version_id
+        self.open('r', headers, query_args=query_args)
         for bytes in self:
             fp.write(bytes)
             if cb:
@@ -612,7 +761,10 @@ class Key(object):
         """
         return self.get_file(fp, headers, cb, num_cb, torrent=True)
     
-    def get_contents_to_file(self, fp, headers=None, cb=None, num_cb=10, torrent=False):
+    def get_contents_to_file(self, fp, headers=None,
+                             cb=None, num_cb=10,
+                             torrent=False,
+                             version_id=None):
         """
         Retrieve an object from S3 using the name of the Key object as the
         key in S3.  Write the contents of the object to the file pointed
@@ -642,9 +794,13 @@ class Key(object):
 
         """
         if self.bucket != None:
-            self.get_file(fp, headers, cb, num_cb, torrent=torrent)
+            self.get_file(fp, headers, cb, num_cb, torrent=torrent,
+                          version_id=version_id)
 
-    def get_contents_to_filename(self, filename, headers=None, cb=None, num_cb=10, torrent=False):
+    def get_contents_to_filename(self, filename, headers=None,
+                                 cb=None, num_cb=10,
+                                 torrent=False,
+                                 version_id=None):
         """
         Retrieve an object from S3 using the name of the Key object as the
         key in S3.  Store contents of the object to a file named by 'filename'.
@@ -675,7 +831,8 @@ class Key(object):
         
         """
         fp = open(filename, 'wb')
-        self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent)
+        self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent,
+                                  version_id=version_id)
         fp.close()
         # if last_modified date was sent from s3, try to set file's timestamp
         if self.last_modified != None:
@@ -685,7 +842,10 @@ class Key(object):
                 os.utime(fp.name, (modified_stamp, modified_stamp))
             except Exception: pass
 
-    def get_contents_as_string(self, headers=None, cb=None, num_cb=10, torrent=False):
+    def get_contents_as_string(self, headers=None,
+                               cb=None, num_cb=10,
+                               torrent=False,
+                               version_id=None):
         """
         Retrieve an object from S3 using the name of the Key object as the
         key in S3.  Return the contents of the object as a string.
@@ -720,10 +880,11 @@ class Key(object):
         :returns: The contents of the file as a string
         """
         fp = StringIO.StringIO()
-        self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent)
+        self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent,
+                                  version_id=version_id)
         return fp.getvalue()
 
-    def add_email_grant(self, permission, email_address):
+    def add_email_grant(self, permission, email_address, headers=None):
         """
         Convenience method that provides a quick way to add an email grant to a key.
         This method retrieves the current ACL, creates a new grant based on the parameters
@@ -739,9 +900,9 @@ class Key(object):
         :param email_address: The email address associated with the AWS account your are granting
                                 the permission to.
         """
-        policy = self.get_acl()
+        policy = self.get_acl(headers=headers)
         policy.acl.add_email_grant(permission, email_address)
-        self.set_acl(policy)
+        self.set_acl(policy, headers=headers)
 
     def add_user_grant(self, permission, user_id):
         """

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/prefix.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/s3/prefix.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/prefix.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/prefix.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/s3/prefix.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
    (empty)

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/user.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/s3/user.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/user.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/s3/user.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/s3/user.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
    (empty)

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/__init__.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/__init__.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/__init__.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/__init__.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/__init__.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
--- incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/__init__.py (original)
+++ incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/__init__.py Sun Jun  5 08:36:52 2011
@@ -20,9 +20,9 @@
 # IN THE SOFTWARE.
 #
 
-import boto
 from regioninfo import SDBRegionInfo
 
+
 def regions():
     """
     Get all available regions for the SDB service.
@@ -30,9 +30,15 @@ def regions():
     :rtype: list
     :return: A list of :class:`boto.sdb.regioninfo.RegionInfo`
     """
-    return [SDBRegionInfo(name='us-east-1', endpoint='sdb.amazonaws.com'),
-            SDBRegionInfo(name='eu-west-1', endpoint='sdb.eu-west-1.amazonaws.com'),
-            SDBRegionInfo(name='us-west-1', endpoint='sdb.us-west-1.amazonaws.com')]
+    return [SDBRegionInfo(name='us-east-1',
+                          endpoint='sdb.amazonaws.com'),
+            SDBRegionInfo(name='eu-west-1',
+                          endpoint='sdb.eu-west-1.amazonaws.com'),
+            SDBRegionInfo(name='us-west-1',
+                          endpoint='sdb.us-west-1.amazonaws.com'),
+            SDBRegionInfo(name='ap-southeast-1',
+                          endpoint='sdb.ap-southeast-1.amazonaws.com')
+            ]
 
 def connect_to_region(region_name):
     for region in regions():
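
The region list above gains ap-southeast-1. A quick sketch of the two helpers; connect_to_region() needs credentials in the environment or boto config and returns None for an unknown region name:

    import boto.sdb

    for region in boto.sdb.regions():
        print region.name, region.endpoint   # now includes ap-southeast-1

    conn = boto.sdb.connect_to_region('ap-southeast-1')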

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/connection.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/connection.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/connection.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/connection.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/connection.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
--- incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/connection.py (original)
+++ incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/connection.py Sun Jun  5 08:36:52 2011
@@ -19,10 +19,8 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 # IN THE SOFTWARE.
 
-import urllib
 import xml.sax
 import threading
-import boto
 from boto import handler
 from boto.connection import AWSQueryConnection
 from boto.sdb.domain import Domain, DomainMetaData
@@ -32,6 +30,7 @@ from boto.exception import SDBResponseEr
 from boto.resultset import ResultSet
 import warnings
 
+
 class ItemThread(threading.Thread):
     
     def __init__(self, name, domain_name, item_names):
@@ -47,11 +46,13 @@ class ItemThread(threading.Thread):
             item = self.conn.get_attributes(self.domain_name, item_name)
             self.items.append(item)
 
+#boto.set_stream_logger('sdb')
+
 class SDBConnection(AWSQueryConnection):
 
     DefaultRegionName = 'us-east-1'
     DefaultRegionEndpoint = 'sdb.amazonaws.com'
-    APIVersion = '2007-11-07'
+    APIVersion = '2009-04-15'
     SignatureVersion = '2'
     ResponseError = SDBResponseError
 
@@ -72,7 +73,8 @@ class SDBConnection(AWSQueryConnection):
     def set_item_cls(self, cls):
         self.item_cls = cls
 
-    def build_name_value_list(self, params, attributes, replace=False):
+    def build_name_value_list(self, params, attributes, replace=False,
+                              label='Attribute'):
         keys = attributes.keys()
         keys.sort()
         i = 1
@@ -80,22 +82,32 @@ class SDBConnection(AWSQueryConnection):
             value = attributes[key]
             if isinstance(value, list):
                 for v in value:
-                    params['Attribute.%d.Name'%i] = key
+                    params['%s.%d.Name'%(label,i)] = key
                     if self.converter:
                         v = self.converter.encode(v)
-                    params['Attribute.%d.Value'%i] = v
+                    params['%s.%d.Value'%(label,i)] = v
                     if replace:
-                        params['Attribute.%d.Replace'%i] = 'true'
+                        params['%s.%d.Replace'%(label,i)] = 'true'
                     i += 1
             else:
-                params['Attribute.%d.Name'%i] = key
+                params['%s.%d.Name'%(label,i)] = key
                 if self.converter:
                     value = self.converter.encode(value)
-                params['Attribute.%d.Value'%i] = value
+                params['%s.%d.Value'%(label,i)] = value
                 if replace:
-                    params['Attribute.%d.Replace'%i] = 'true'
+                    params['%s.%d.Replace'%(label,i)] = 'true'
             i += 1
 
+    def build_expected_value(self, params, expected_value):
+        params['Expected.1.Name'] = expected_value[0]
+        if expected_value[1] == True:
+            params['Expected.1.Exists'] = 'true'
+        elif expected_value[1] == False:
+            params['Expected.1.Exists'] = 'false'
+        else:
+            params['Expected.1.Value'] = expected_value[1]
+            
+
     def build_batch_list(self, params, items, replace=False):
         item_names = items.keys()
         i = 0
@@ -232,7 +244,8 @@ class SDBConnection(AWSQueryConnection):
         d.domain = domain
         return d
         
-    def put_attributes(self, domain_or_name, item_name, attributes, replace=True):
+    def put_attributes(self, domain_or_name, item_name, attributes,
+                       replace=True, expected_value=None):
         """
         Store attributes for a given item in a domain.
 
@@ -245,6 +258,21 @@ class SDBConnection(AWSQueryConnection):
         :type attribute_names: dict or dict-like object
         :param attribute_names: The name/value pairs to store as attributes
 
+        :type expected_value: list
+        :param expected_value: If supplied, this is a list or tuple consisting
+                               of a single attribute name and expected value.
+                               The list can be of the form:
+                                * ['name', 'value']
+                               In which case the call will first verify
+                               that the attribute "name" of this item has
+                               a value of "value".  If it does, the put
+                               will proceed, otherwise a ConditionalCheckFailed
+                               error will be returned.
+                               The list can also be of the form:
+                                * ['name', True|False]
+                               which will simply check for the existence (True)
+                               or non-existence (False) of the attribute.
+
         :type replace: bool
         :param replace: Whether the attribute values passed in will replace
                         existing values or will be added as addition values.
@@ -257,6 +285,8 @@ class SDBConnection(AWSQueryConnection):
         params = {'DomainName' : domain_name,
                   'ItemName' : item_name}
         self.build_name_value_list(params, attributes, replace)
+        if expected_value:
+            self.build_expected_value(params, expected_value)
         return self.get_status('PutAttributes', params)
 
     def batch_put_attributes(self, domain_or_name, items, replace=True):
@@ -286,7 +316,8 @@ class SDBConnection(AWSQueryConnection):
         self.build_batch_list(params, items, replace)
         return self.get_status('BatchPutAttributes', params, verb='POST')
 
-    def get_attributes(self, domain_or_name, item_name, attribute_names=None, item=None):
+    def get_attributes(self, domain_or_name, item_name, attribute_names=None,
+                       consistent_read=False, item=None):
         """
         Retrieve attributes for a given item in a domain.
 
@@ -301,12 +332,18 @@ class SDBConnection(AWSQueryConnection):
                                 parameter is optional.  If not supplied, all attributes
                                 will be retrieved for the item.
 
+        :type consistent_read: bool
+        :param consistent_read: When set to true, ensures that the most recent
+                                data is returned.
+
         :rtype: :class:`boto.sdb.item.Item`
         :return: An Item mapping type containing the requested attribute name/values
         """
         domain, domain_name = self.get_domain_and_name(domain_or_name)
         params = {'DomainName' : domain_name,
                   'ItemName' : item_name}
+        if consistent_read:
+            params['ConsistentRead'] = 'true'
         if attribute_names:
             if not isinstance(attribute_names, list):
                 attribute_names = [attribute_names]
@@ -322,7 +359,8 @@ class SDBConnection(AWSQueryConnection):
         else:
             raise SDBResponseError(response.status, response.reason, body)
         
-    def delete_attributes(self, domain_or_name, item_name, attr_names=None):
+    def delete_attributes(self, domain_or_name, item_name, attr_names=None,
+                          expected_value=None):
         """
         Delete attributes from a given item in a domain.
 
@@ -339,6 +377,21 @@ class SDBConnection(AWSQueryConnection):
                            of values to delete as the value.  If no value is supplied,
                            all attribute name/values for the item will be deleted.
                            
+        :type expected_value: list
+        :param expected_value: If supplied, this is a list or tuple consisting
+                               of a single attribute name and expected value.
+                               The list can be of the form:
+                                * ['name', 'value']
+                               In which case the call will first verify
+                               that the attribute "name" of this item has
+                               a value of "value".  If it does, the delete
+                               will proceed, otherwise a ConditionalCheckFailed
+                               error will be returned.
+                               The list can also be of the form:
+                                * ['name', True|False]
+                               which will simply check for the existence (True)
+                               or non-existence (False) of the attribute.
+
         :rtype: bool
         :return: True if successful
         """
@@ -350,73 +403,12 @@ class SDBConnection(AWSQueryConnection):
                 self.build_name_list(params, attr_names)
             elif isinstance(attr_names, dict) or isinstance(attr_names, self.item_cls):
                 self.build_name_value_list(params, attr_names)
+        if expected_value:
+            self.build_expected_value(params, expected_value)
         return self.get_status('DeleteAttributes', params)
         
-    def query(self, domain_or_name, query='', max_items=None, next_token=None):
-        """
-        Returns a list of item names within domain_name that match the query.
-        
-        :type domain_or_name: string or :class:`boto.sdb.domain.Domain` object.
-        :param domain_or_name: Either the name of a domain or a Domain object
-
-        :type query: string
-        :param query: The SimpleDB query to be performed.
-
-        :type max_items: int
-        :param max_items: The maximum number of items to return.  If not
-                          supplied, the default is None which returns all
-                          items matching the query.
-
-        :rtype: ResultSet
-        :return: An iterator containing the results.
-        """
-        warnings.warn('Query interface is deprecated', DeprecationWarning)
-        domain, domain_name = self.get_domain_and_name(domain_or_name)
-        params = {'DomainName':domain_name,
-                  'QueryExpression' : query}
-        if max_items:
-            params['MaxNumberOfItems'] = max_items
-        if next_token:
-            params['NextToken'] = next_token
-        return self.get_object('Query', params, ResultSet)
-
-    def query_with_attributes(self, domain_or_name, query='', attr_names=None,
-                              max_items=None, next_token=None):
-        """
-        Returns a set of Attributes for item names within domain_name that match the query.
-        
-        :type domain_or_name: string or :class:`boto.sdb.domain.Domain` object.
-        :param domain_or_name: Either the name of a domain or a Domain object
-
-        :type query: string
-        :param query: The SimpleDB query to be performed.
-
-        :type attr_names: list
-        :param attr_names: The name of the attributes to be returned.
-                           If no attributes are specified, all attributes
-                           will be returned.
-
-        :type max_items: int
-        :param max_items: The maximum number of items to return.  If not
-                          supplied, the default is None which returns all
-                          items matching the query.
-
-        :rtype: ResultSet
-        :return: An iterator containing the results.
-        """
-        warnings.warn('Query interface is deprecated', DeprecationWarning)
-        domain, domain_name = self.get_domain_and_name(domain_or_name)
-        params = {'DomainName':domain_name,
-                  'QueryExpression' : query}
-        if max_items:
-            params['MaxNumberOfItems'] = max_items
-        if next_token:
-            params['NextToken'] = next_token
-        if attr_names:
-            self.build_list_params(params, attr_names, 'AttributeName')
-        return self.get_list('QueryWithAttributes', params, [('Item', self.item_cls)], parent=domain)
-
-    def select(self, domain_or_name, query='', next_token=None):
+    def select(self, domain_or_name, query='', next_token=None,
+               consistent_read=False):
         """
         Returns a set of Attributes for item names within domain_name that match the query.
         The query must be expressed in using the SELECT style syntax rather than the
@@ -431,41 +423,19 @@ class SDBConnection(AWSQueryConnection):
         :type query: string
         :param query: The SimpleDB query to be performed.
 
+        :type consistent_read: bool
+        :param consistent_read: When set to true, ensures that the most recent
+                                data is returned.
+
         :rtype: ResultSet
         :return: An iterator containing the results.
         """
         domain, domain_name = self.get_domain_and_name(domain_or_name)
         params = {'SelectExpression' : query}
+        if consistent_read:
+            params['ConsistentRead'] = 'true'
         if next_token:
             params['NextToken'] = next_token
-        return self.get_list('Select', params, [('Item', self.item_cls)], parent=domain)
-
-    def threaded_query(self, domain_or_name, query='', max_items=None, next_token=None, num_threads=6):
-        """
-        Returns a list of fully populated items that match the query provided.
-
-        The name/value pairs for all of the matching item names are retrieved in a number of separate
-        threads (specified by num_threads) to achieve maximum throughput.
-        The ResultSet that is returned has an attribute called next_token that can be used
-        to retrieve additional results for the same query.
-        """
-        domain, domain_name = self.get_domain_and_name(domain_or_name)
-        if max_items and num_threads > max_items:
-            num_threads = max_items
-        rs = self.query(domain_or_name, query, max_items, next_token)
-        threads = []
-        n = len(rs) / num_threads
-        for i in range(0, num_threads):
-            if i+1 == num_threads:
-                thread = ItemThread('Thread-%d' % i, domain_name, rs[n*i:])
-            else:
-                thread = ItemThread('Thread-%d' % i, domain_name, rs[n*i:n*(i+1)])
-            threads.append(thread)
-            thread.start()
-        del rs[0:]
-        for thread in threads:
-            thread.join()
-            for item in thread.items:
-                rs.append(item)
-        return rs
+        return self.get_list('Select', params, [('Item', self.item_cls)],
+                             parent=domain)
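
The SimpleDB connection changes above add conditional writes (expected_value) and consistent reads. A minimal sketch, assuming configured credentials; the domain, item, and attribute names are made up:

    import boto

    sdb = boto.connect_sdb()
    sdb.create_domain('jobs')

    sdb.put_attributes('jobs', 'job-1', {'state': 'pending'})

    # Conditional write: only succeeds while 'state' is still 'pending';
    # otherwise SimpleDB returns a ConditionalCheckFailed error.
    sdb.put_attributes('jobs', 'job-1', {'state': 'running'},
                       expected_value=['state', 'pending'])

    # Consistent reads see the write above rather than a stale replica.
    attrs = sdb.get_attributes('jobs', 'job-1', consistent_read=True)
    rs = sdb.select('jobs', "select * from `jobs` where state = 'running'",
                    consistent_read=True)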
 

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/__init__.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/__init__.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/__init__.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/__init__.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/__init__.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
    (empty)

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/blob.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/blob.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/blob.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/blob.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/blob.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
--- incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/blob.py (original)
+++ incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/blob.py Sun Jun  5 08:36:52 2011
@@ -38,9 +38,13 @@ class Blob(object):
 
     def __str__(self):
         if hasattr(self.file, "get_contents_as_string"):
-            return str(self.file.get_contents_as_string())
+            value = self.file.get_contents_as_string()
         else:
-            return str(self.file.getvalue())
+            value = self.file.getvalue()
+        try:
+            return str(value)
+        except:
+            return unicode(value)
 
     def read(self):
         return self.file.read()
@@ -49,7 +53,7 @@ class Blob(object):
         return self.file.readline()
 
     def next(self):
-        return sefl.file.next()
+        return self.file.next()
 
     def __iter__(self):
         return iter(self.file)

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/key.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/key.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/key.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/key.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/key.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
    (empty)

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/__init__.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/manager/__init__.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/__init__.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/__init__.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/manager/__init__.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
--- incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/manager/__init__.py (original)
+++ incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/__init__.py Sun Jun  5 08:36:52 2011
@@ -66,6 +66,8 @@ def get_manager(cls):
         db_port = boto.config.getint(db_section, 'db_port', db_port)
         enable_ssl = boto.config.getint(db_section, 'enable_ssl', enable_ssl)
         debug = boto.config.getint(db_section, 'debug', debug)
+    elif hasattr(cls.__bases__[0], "_manager"):
+        return cls.__bases__[0]._manager
     if db_type == 'SimpleDB':
         from sdbmanager import SDBManager
         return SDBManager(cls, db_name, db_user, db_passwd,
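
A hedged sketch of the inheritance behaviour the new elif adds: a Model subclass with no DB_* config section of its own now reuses its parent's manager instead of constructing a second one (class names below are hypothetical; assumes a default boto config):

    from boto.sdb.db.model import Model
    from boto.sdb.db.property import StringProperty

    class Vehicle(Model):
        make = StringProperty()

    class Truck(Vehicle):          # no dedicated [DB_Truck] config section
        payload = StringProperty()

    # With the change, Truck falls into the new elif branch and inherits
    # Vehicle's manager rather than building a fresh SDBManager.
    assert Truck._manager is Vehicle._manager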

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/pgmanager.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/manager/pgmanager.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/pgmanager.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/pgmanager.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/manager/pgmanager.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
--- incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/manager/pgmanager.py (original)
+++ incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/pgmanager.py Sun Jun  5 08:36:52 2011
@@ -22,8 +22,10 @@ from boto.sdb.db.key import Key
 from boto.sdb.db.model import Model
 import psycopg2
 import psycopg2.extensions
-import uuid, sys, os, string
-from boto.exception import *
+import uuid
+import os
+import string
+from boto.exception import SDBPersistenceError
 
 psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
 
@@ -286,7 +288,7 @@ class PGManager(object):
                 qs += "%s=" % name
                 qs += "%s"
         if not found:
-            raise SDBPersistenceError('%s is not a valid field' % key)
+            raise SDBPersistenceError('%s is not a valid field' % name)
         qs += ';'
         print qs
         self.cursor.execute(qs, values)
@@ -313,7 +315,7 @@ class PGManager(object):
                         value = self.encode_value(property, value)
                         parts.append(""""%s"%s'%s'""" % (name, op, value))
                 if not found:
-                    raise SDBPersistenceError('%s is not a valid field' % key)
+                    raise SDBPersistenceError('%s is not a valid field' % name)
             qs += ','.join(parts)
         qs += ';'
         print qs
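
The two message fixes above matter because the old code interpolated an undefined `key` variable; the exception now names the field that actually failed. A trimmed-down illustration (helper name is hypothetical):

    from boto.exception import SDBPersistenceError

    def check_field(name, valid_names):
        if name not in valid_names:
            # The message now reports the offending field itself.
            raise SDBPersistenceError('%s is not a valid field' % name)

    check_field('make', ['make', 'model'])       # passes
    # check_field('colour', ['make', 'model'])   # would raise SDBPersistenceError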

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/sdbmanager.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/manager/sdbmanager.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/sdbmanager.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/sdbmanager.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/manager/sdbmanager.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
--- incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/manager/sdbmanager.py (original)
+++ incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/sdbmanager.py Sun Jun  5 08:36:52 2011
@@ -1,4 +1,5 @@
 # Copyright (c) 2006,2007,2008 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2010 Chris Moyer http://coredumped.org/
 #
 # Permission is hereby granted, free of charge, to any person obtaining a
 # copy of this software and associated documentation files (the
@@ -26,12 +27,12 @@ from boto.sdb.db.key import Key
 from boto.sdb.db.model import Model
 from boto.sdb.db.blob import Blob
 from boto.sdb.db.property import ListProperty, MapProperty
-from datetime import datetime
+from datetime import datetime, date
 from boto.exception import SDBPersistenceError
-from tempfile import TemporaryFile
 
 ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
 
+
 class SDBConverter:
     """
     Responsible for converting base Python types to format compatible with underlying
@@ -53,10 +54,16 @@ class SDBConverter:
                           Model : (self.encode_reference, self.decode_reference),
                           Key : (self.encode_reference, self.decode_reference),
                           datetime : (self.encode_datetime, self.decode_datetime),
+                          date : (self.encode_date, self.decode_date),
                           Blob: (self.encode_blob, self.decode_blob),
                       }
 
     def encode(self, item_type, value):
+        try:
+            if Model in item_type.mro():
+                item_type = Model
+        except:
+            pass
         if item_type in self.type_map:
             encode = self.type_map[item_type][0]
             return encode(value)
@@ -69,17 +76,25 @@ class SDBConverter:
         return value
 
     def encode_list(self, prop, value):
+        if value in (None, []):
+            return []
         if not isinstance(value, list):
-            value = [value]
-        new_value = []
-        for v in value:
+            # This is a little trick to avoid encoding when it's just a single value,
+            # since that most likely means it's from a query
             item_type = getattr(prop, "item_type")
-            if Model in item_type.mro():
-                item_type = Model
-            new_value.append(self.encode(item_type, v))
-        return new_value
+            return self.encode(item_type, value)
+        # Just enumerate(value) won't work here because
+        # we need to add in some zero padding
+        # We support lists up to 1,000 attributes, since
+        # SDB technically only supports 1024 attributes anyway
+        values = {}
+        for k,v in enumerate(value):
+            values["%03d" % k] = v
+        return self.encode_map(prop, values)
 
     def encode_map(self, prop, value):
+        if value == None:
+            return None
         if not isinstance(value, dict):
             raise ValueError, 'Expected a dict value, got %s' % type(value)
         new_value = []
@@ -88,7 +103,8 @@ class SDBConverter:
             if Model in item_type.mro():
                 item_type = Model
             encoded_value = self.encode(item_type, value[key])
-            new_value.append('%s:%s' % (key, encoded_value))
+            if encoded_value != None and encoded_value != "None":
+                new_value.append('%s:%s' % (key, encoded_value))
         return new_value
 
     def encode_prop(self, prop, value):
@@ -104,26 +120,39 @@ class SDBConverter:
             value = [value]
         if hasattr(prop, 'item_type'):
             item_type = getattr(prop, "item_type")
-            if Model in item_type.mro():
-                return [item_type(id=v) for v in value]
-            return [self.decode(item_type, v) for v in value]
-        else:
-            return value
+            dec_val = {}
+            for val in value:
+                if val != "None" and val != None:
+                    k,v = self.decode_map_element(item_type, val)
+                    try:
+                        k = int(k)
+                    except:
+                        k = v
+                    dec_val[k] = v
+            value = dec_val.values()
+        return value
 
     def decode_map(self, prop, value):
         if not isinstance(value, list):
             value = [value]
         ret_value = {}
         item_type = getattr(prop, "item_type")
-        for keyval in value:
-            key, val = keyval.split(':', 1)
-            if Model in item_type.mro():
-                val = item_type(id=val)
-            else:
-                val = self.decode(item_type, val)
-            ret_value[key] = val
+        for val in value:
+            k,v = self.decode_map_element(item_type, val)
+            ret_value[k] = v
         return ret_value
-        
+
+    def decode_map_element(self, item_type, value):
+        """Decode a single element for a map"""
+        key = value
+        if ":" in value:
+            key, value = value.split(':',1)
+        if Model in item_type.mro():
+            value = item_type(id=value)
+        else:
+            value = self.decode(item_type, value)
+        return (key, value)
+
     def decode_prop(self, prop, value):
         if isinstance(prop, ListProperty):
             return self.decode_list(prop, value)
@@ -138,6 +167,11 @@ class SDBConverter:
         return '%010d' % value
 
     def decode_int(self, value):
+        try:
+            value = int(value)
+        except:
+            boto.log.error("Error, %s is not an integer" % value)
+            value = 0
         value = int(value)
         value -= 2147483648
         return int(value)
@@ -153,7 +187,7 @@ class SDBConverter:
         return value
 
     def encode_bool(self, value):
-        if value == True:
+        if value == True or str(value).lower() in ("true", "yes"):
             return 'true'
         else:
             return 'false'
@@ -224,22 +258,36 @@ class SDBConverter:
         except:
             return None
 
+    def encode_date(self, value):
+        if isinstance(value, str) or isinstance(value, unicode):
+            return value
+        return value.isoformat()
+
+    def decode_date(self, value):
+        try:
+            value = value.split("-")
+            return date(int(value[0]), int(value[1]), int(value[2]))
+        except:
+            return None
+
     def encode_reference(self, value):
+        if value in (None, 'None', '', ' '):
+            return 'None'
         if isinstance(value, str) or isinstance(value, unicode):
             return value
-        if value == None:
-            return ''
         else:
             return value.id
 
     def decode_reference(self, value):
-        if not value:
+        if not value or value == "None":
             return None
         return value
 
     def encode_blob(self, value):
         if not value:
             return None
+        if isinstance(value, str):
+            return value
 
         if not value.id:
             bucket = self.manager.get_blob_bucket()
@@ -277,7 +325,7 @@ class SDBConverter:
 class SDBManager(object):
     
     def __init__(self, cls, db_name, db_user, db_passwd,
-                 db_host, db_port, db_table, ddl_dir, enable_ssl):
+                 db_host, db_port, db_table, ddl_dir, enable_ssl, consistent=None):
         self.cls = cls
         self.db_name = db_name
         self.db_user = db_user
@@ -290,19 +338,35 @@ class SDBManager(object):
         self.s3 = None
         self.bucket = None
         self.converter = SDBConverter(self)
-        self._connect()
+        self._sdb = None
+        self._domain = None
+        if consistent == None and hasattr(cls, "__consistent"):
+            consistent = cls.__consistent__
+        self.consistent = consistent
+
+    @property
+    def sdb(self):
+        if self._sdb is None:
+            self._connect()
+        return self._sdb
+
+    @property
+    def domain(self):
+        if self._domain is None:
+            self._connect()
+        return self._domain
 
     def _connect(self):
-        self.sdb = boto.connect_sdb(aws_access_key_id=self.db_user,
+        self._sdb = boto.connect_sdb(aws_access_key_id=self.db_user,
                                     aws_secret_access_key=self.db_passwd,
                                     is_secure=self.enable_ssl)
         # This assumes that the domain has already been created
         # It's much more efficient to do it this way rather than
         # having this make a roundtrip each time to validate.
         # The downside is that if the domain doesn't exist, it breaks
-        self.domain = self.sdb.lookup(self.db_name, validate=False)
-        if not self.domain:
-            self.domain = self.sdb.create_domain(self.db_name)
+        self._domain = self._sdb.lookup(self.db_name, validate=False)
+        if not self._domain:
+            self._domain = self._sdb.create_domain(self.db_name)
 
     def _object_lister(self, cls, query_lister):
         for item in query_lister:
@@ -337,19 +401,22 @@ class SDBManager(object):
             
     def load_object(self, obj):
         if not obj._loaded:
-            a = self.domain.get_attributes(obj.id)
+            a = self.domain.get_attributes(obj.id,consistent_read=self.consistent)
             if a.has_key('__type__'):
                 for prop in obj.properties(hidden=False):
                     if a.has_key(prop.name):
                         value = self.decode_value(prop, a[prop.name])
                         value = prop.make_value_from_datastore(value)
-                        setattr(obj, prop.name, value)
+                        try:
+                            setattr(obj, prop.name, value)
+                        except Exception, e:
+                            boto.log.exception(e)
             obj._loaded = True
         
     def get_object(self, cls, id, a=None):
         obj = None
         if not a:
-            a = self.domain.get_attributes(id)
+            a = self.domain.get_attributes(id,consistent_read=self.consistent)
         if a.has_key('__type__'):
             if not cls or a['__type__'] != cls.__name__:
                 cls = find_class(a['__module__'], a['__type__'])
@@ -387,6 +454,24 @@ class SDBManager(object):
         count =  int(self.domain.select(query).next()["Count"])
         return count
 
+
+    def _build_filter(self, property, name, op, val):
+        if val == None:
+            if op in ('is','='):
+                return "`%s` is null" % name
+            elif op in ('is not', '!='):
+                return "`%s` is not null" % name
+            else:
+                val = ""
+        if property.__class__ == ListProperty:
+            if op in ("is", "="):
+                op = "like"
+            elif op in ("!=", "not"):
+                op = "not like"
+            if not(op == "like" and val.startswith("%")):
+                val = "%%:%s" % val
+        return "`%s` %s '%s'" % (name, op, val.replace("'", "''"))
+
     def _build_filter_part(self, cls, filters, order_by=None):
         """
         Build the filter part
@@ -402,37 +487,39 @@ class SDBManager(object):
                 order_by_method = "asc";
 
         for filter in filters:
-            (name, op) = filter[0].strip().split(" ", 1)
-            value = filter[1]
-            property = cls.find_property(name)
-            if name == order_by:
-                order_by_filtered = True
-            if types.TypeType(value) == types.ListType:
-                filter_parts = []
-                for val in value:
-                    val = self.encode_value(property, val)
-                    if isinstance(val, list):
-                        for v in val:
-                            filter_parts.append("`%s` %s '%s'" % (name, op, v.replace("'", "''")))
-                    else:
-                        filter_parts.append("`%s` %s '%s'" % (name, op, val.replace("'", "''")))
-                query_parts.append("(%s)" % (" or ".join(filter_parts)))
-            else:
-                if op == 'is' and value == None:
-                    query_parts.append("`%s` is null" % name)
-                elif op == 'is not' and value == None:
-                    query_parts.append("`%s` is not null" % name)
+            filter_parts = []
+            filter_props = filter[0]
+            if type(filter_props) != list:
+                filter_props = [filter_props]
+            for filter_prop in filter_props:
+                (name, op) = filter_prop.strip().split(" ", 1)
+                value = filter[1]
+                property = cls.find_property(name)
+                if name == order_by:
+                    order_by_filtered = True
+                if types.TypeType(value) == types.ListType:
+                    filter_parts_sub = []
+                    for val in value:
+                        val = self.encode_value(property, val)
+                        if isinstance(val, list):
+                            for v in val:
+                                filter_parts_sub.append(self._build_filter(property, name, op, v))
+                        else:
+                            filter_parts_sub.append(self._build_filter(property, name, op, val))
+                    filter_parts.append("(%s)" % (" or ".join(filter_parts_sub)))
                 else:
                     val = self.encode_value(property, value)
                     if isinstance(val, list):
                         for v in val:
-                            query_parts.append("`%s` %s '%s'" % (name, op, v.replace("'", "''")))
+                            filter_parts.append(self._build_filter(property, name, op, v))
                     else:
-                        query_parts.append("`%s` %s '%s'" % (name, op, val.replace("'", "''")))
+                        filter_parts.append(self._build_filter(property, name, op, val))
+            query_parts.append("(%s)" % (" or ".join(filter_parts)))
+
 
         type_query = "(`__type__` = '%s'" % cls.__name__
-        for subclass in cls.__sub_classes__:
-            type_query += " or `__type__` = '%s'" % subclass.__name__
+        for subclass in self._get_all_decendents(cls).keys():
+            type_query += " or `__type__` = '%s'" % subclass
         type_query +=")"
         query_parts.append(type_query)
 
@@ -448,6 +535,14 @@ class SDBManager(object):
             return ""
 
 
+    def _get_all_decendents(self, cls):
+        """Get all decendents for a given class"""
+        decendents = {}
+        for sc in cls.__sub_classes__:
+            decendents[sc.__name__] = sc
+            decendents.update(self._get_all_decendents(sc))
+        return decendents
+
     def query_gql(self, query_string, *args, **kwds):
         raise NotImplementedError, "GQL queries not supported in SimpleDB"
 
@@ -462,7 +557,9 @@ class SDBManager(object):
             value = property.get_value_for_datastore(obj)
             if value is not None:
                 value = self.encode_value(property, value)
-                attrs[property.name] = value
+            if value == []:
+                value = None
+            attrs[property.name] = value
             if property.unique:
                 try:
                     args = {property.name: value}
@@ -490,7 +587,7 @@ class SDBManager(object):
         self.domain.put_attributes(obj.id, {name : value}, replace=True)
 
     def get_property(self, prop, obj, name):
-        a = self.domain.get_attributes(obj.id)
+        a = self.domain.get_attributes(obj.id,consistent_read=self.consistent)
 
         # try to get the attribute value from SDB
         if name in a:
@@ -507,7 +604,7 @@ class SDBManager(object):
         self.domain.delete_attributes(obj.id, name)
 
     def get_key_value(self, obj, name):
-        a = self.domain.get_attributes(obj.id, name)
+        a = self.domain.get_attributes(obj.id, name,consistent_read=self.consistent)
         if a.has_key(name):
             return a[name]
         else:
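
Among the larger changes above, the new _build_filter helper centralises null handling and the like-matching used for ListProperty values. A standalone sketch of the rules it applies (simplified to plain arguments instead of property objects):

    def build_filter(name, op, val, is_list=False):
        """Simplified mirror of SDBManager._build_filter."""
        if val is None:
            if op in ('is', '='):
                return "`%s` is null" % name
            if op in ('is not', '!='):
                return "`%s` is not null" % name
            val = ""
        if is_list:
            # List members are stored as "NNN:value", so equality becomes a
            # suffix match with like / not like.
            if op in ('is', '='):
                op = 'like'
            elif op in ('!=', 'not'):
                op = 'not like'
            if not (op == 'like' and val.startswith('%')):
                val = '%%:%s' % val
        return "`%s` %s '%s'" % (name, op, val.replace("'", "''"))

    print build_filter('state', '=', None)             # `state` is null
    print build_filter('tags', '=', 'urgent', True)    # `tags` like '%:urgent'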

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/xmlmanager.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/manager/xmlmanager.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/xmlmanager.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/xmlmanager.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/manager/xmlmanager.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
--- incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/manager/xmlmanager.py (original)
+++ incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/manager/xmlmanager.py Sun Jun  5 08:36:52 2011
@@ -20,11 +20,9 @@
 # IN THE SOFTWARE.
 import boto
 from boto.utils import find_class, Password
-import uuid
 from boto.sdb.db.key import Key
 from boto.sdb.db.model import Model
 from datetime import datetime
-from boto.exception import SDBPersistenceError
 from xml.dom.minidom import getDOMImplementation, parse, parseString, Node
 
 ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
@@ -461,7 +459,7 @@ class XMLManager(object):
                 elif isinstance(value, Node):
                     prop_node.appendChild(value)
                 else:
-                    text_node = doc.createTextNode(str(value))
+                    text_node = doc.createTextNode(unicode(value).encode("ascii", "ignore"))
                     prop_node.appendChild(text_node)
             obj_node.appendChild(prop_node)
 

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/model.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/model.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/model.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/model.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/model.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
--- incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/model.py (original)
+++ incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/model.py Sun Jun  5 08:36:52 2011
@@ -20,7 +20,7 @@
 # IN THE SOFTWARE.
 
 from boto.sdb.db.manager import get_manager
-from boto.sdb.db.property import *
+from boto.sdb.db.property import Property
 from boto.sdb.db.key import Key
 from boto.sdb.db.query import Query
 import boto
@@ -55,6 +55,8 @@ class ModelMeta(type):
         
 class Model(object):
     __metaclass__ = ModelMeta
+    __consistent__ = False # Consistent is set off by default
+    id = None
 
     @classmethod
     def get_lineage(cls):
@@ -181,6 +183,11 @@ class Model(object):
         if self.id and not self._loaded:
             self._manager.load_object(self)
 
+    def reload(self):
+        if self.id:
+            self._loaded = False
+            self._manager.load_object(self)
+
     def put(self):
         self._manager.save_object(self)
 

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/property.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/property.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/property.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/property.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/property.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
--- incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/property.py (original)
+++ incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/property.py Sun Jun  5 08:36:52 2011
@@ -23,8 +23,6 @@ import datetime
 from key import Key
 from boto.utils import Password
 from boto.sdb.db.query import Query
-from tempfile import TemporaryFile
-
 import re
 import boto
 import boto.s3.key
@@ -63,7 +61,7 @@ class Property(object):
             if obj._loaded and hasattr(obj, "on_set_%s" % self.name):
                 fnc = getattr(obj, "on_set_%s" % self.name)
                 value = fnc(value)
-        except Exception, e:
+        except Exception:
             boto.log.exception("Exception running on_set_%s" % self.name)
 
         setattr(obj, self.slot_name, value)
@@ -261,6 +259,13 @@ class IntegerProperty(Property):
     def empty(self, value):
         return value is None
 
+    def __set__(self, obj, value):
+        if value == "" or value == None:
+            value = 0
+        return Property.__set__(self, obj, value)
+
+
+
 class LongProperty(Property):
 
     data_type = long
@@ -343,6 +348,36 @@ class DateTimeProperty(Property):
     def now(self):
         return datetime.datetime.utcnow()
 
+class DateProperty(Property):
+
+    data_type = datetime.date
+    type_name = 'Date'
+
+    def __init__(self, verbose_name=None, auto_now=False, auto_now_add=False, name=None,
+                 default=None, required=False, validator=None, choices=None, unique=False):
+        Property.__init__(self, verbose_name, name, default, required, validator, choices, unique)
+        self.auto_now = auto_now
+        self.auto_now_add = auto_now_add
+
+    def default_value(self):
+        if self.auto_now or self.auto_now_add:
+            return self.now()
+        return Property.default_value(self)
+
+    def validate(self, value):
+        if value == None:
+            return
+        if not isinstance(value, self.data_type):
+            raise TypeError, 'Validation Error, expecting %s, got %s' % (self.data_type, type(value))
+
+    def get_value_for_datastore(self, model_instance):
+        if self.auto_now:
+            setattr(model_instance, self.name, self.now())
+        return Property.get_value_for_datastore(self, model_instance)
+
+    def now(self):
+        return datetime.date.today()
+
 class ReferenceProperty(Property):
 
     data_type = Key
@@ -363,17 +398,17 @@ class ReferenceProperty(Property):
             # the object now that is the attribute has actually been accessed.  This lazy
             # instantiation saves unnecessary roundtrips to SimpleDB
             if isinstance(value, str) or isinstance(value, unicode):
-                # This is some minor handling to allow us to use the base "Model" class
-                # as our reference class. If we do so, we're going to assume we're using
-                # our own class's manager to fetch objects
-                if hasattr(self.reference_class, "_manager"):
-                    manager = self.reference_class._manager
-                else:
-                    manager = obj._manager
-                value = manager.get_object(self.reference_class, value)
+                value = self.reference_class(value)
                 setattr(obj, self.name, value)
             return value
-    
+
+    def __set__(self, obj, value):
+        """Don't allow this object to be associated to itself
+        This causes bad things to happen"""
+        if value != None and (obj.id == value or (hasattr(value, "id") and obj.id == value.id)):
+            raise ValueError, "Can not associate an object with itself!"
+        return super(ReferenceProperty, self).__set__(obj,value)
+
     def __property_config__(self, model_class, property_name):
         Property.__property_config__(self, model_class, property_name)
         if self.collection_name is None:
@@ -414,6 +449,7 @@ class _ReverseReferenceProperty(Property
     def __init__(self, model, prop, name):
         self.__model = model
         self.__property = prop
+        self.collection_name = prop
         self.name = name
         self.item_type = model
 
@@ -421,7 +457,13 @@ class _ReverseReferenceProperty(Property
         """Fetches collection of model instances of this collection property."""
         if model_instance is not None:
             query = Query(self.__model)
-            return query.filter(self.__property + ' =', model_instance)
+            if type(self.__property) == list:
+                props = []
+                for prop in self.__property:
+                    props.append("%s =" % prop)
+                return query.filter(props, model_instance)
+            else:
+                return query.filter(self.__property + ' =', model_instance)
         else:
             return self
 
@@ -514,6 +556,8 @@ class ListProperty(Property):
             item_type = self.item_type
         if isinstance(value, item_type):
             value = [value]
+        elif value == None: # Override to allow them to set this to "None" to remove everything
+            value = []
         return super(ListProperty, self).__set__(obj,value)
 
 
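
A short sketch exercising a few of the property changes above: the new DateProperty, the IntegerProperty coercion of ''/None to 0, and ListProperty treating None as "clear the list" (class name is hypothetical; no SimpleDB round trip is made until put()):

    from boto.sdb.db.model import Model
    from boto.sdb.db.property import DateProperty, IntegerProperty, ListProperty

    class Invoice(Model):
        due = DateProperty(auto_now_add=True)   # defaults to date.today()
        retries = IntegerProperty()
        tags = ListProperty(str)

    inv = Invoice()
    inv.retries = ''      # coerced to 0 by the new IntegerProperty.__set__
    inv.tags = None       # interpreted as "remove everything", stored as []
    print inv.due, inv.retries, inv.tags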

Copied: incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/query.py (from r1132067, incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/query.py)
URL: http://svn.apache.org/viewvc/incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/query.py?p2=incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/query.py&p1=incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/query.py&r1=1132067&r2=1132068&rev=1132068&view=diff
==============================================================================
--- incubator/mesos/trunk/third_party/boto-1.9b/boto/sdb/db/query.py (original)
+++ incubator/mesos/trunk/third_party/boto-2.0b2/boto/sdb/db/query.py Sun Jun  5 08:36:52 2011
@@ -24,6 +24,7 @@ class Query(object):
     def __init__(self, model_class, limit=None, next_token=None, manager=None):
         self.model_class = model_class
         self.limit = limit
+        self.offset = 0
         if manager:
             self.manager = manager
         else:
@@ -46,11 +47,18 @@ class Query(object):
         return self
 
     def fetch(self, limit, offset=0):
-        raise NotImplementedError, "fetch mode is not currently supported"
+        """Not currently fully supported, but we can use this
+        to allow them to set a limit in a chainable method"""
+        self.limit = limit
+        self.offset = offset
+        return self
 
     def count(self):
         return self.manager.count(self.model_class, self.filters)
 
+    def get_query(self):
+        return self.manager._build_filter_part(self.model_class, self.filters, self.sort_by)
+
     def order(self, key):
         self.sort_by = key
         return self
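
Finally, a hedged usage sketch of the reworked Query: fetch() is now a chainable limit/offset setter instead of raising NotImplementedError, and get_query() exposes the where-clause the manager will hand to select(). (Model name is hypothetical; building the query string needs no SimpleDB connection, iterating it does.)

    from boto.sdb.db.model import Model
    from boto.sdb.db.property import StringProperty
    from boto.sdb.db.query import Query

    class Task(Model):
        state = StringProperty()

    q = Query(Task).filter('state =', 'running').order('-state').fetch(25)
    print q.get_query()   # e.g. where (`state` = 'running') and (`__type__` = 'Task') ...
    # Iterating q would execute the select against the live domain:
    # for task in q:
    #     print task.id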