Procházet zdrojové kódy

Fixed remaining errors in object store tests for boto3

Nuwan Goonasekera před 8 roky
rodič
revize
0097ba333e

+ 54 - 33
cloudbridge/cloud/providers/aws/resources.py

@@ -4,10 +4,6 @@ DataTypes used by this provider
 import hashlib
 import inspect
 
-from datetime import datetime
-
-from boto.exception import EC2ResponseError
-from boto.s3.key import Key
 from botocore.exceptions import ClientError
 
 from cloudbridge.cloud.base.resources import BaseAttachmentInfo
@@ -450,7 +446,7 @@ class AWSInstance(BaseInstance):
         """
         try:
             self._ec2_instance.reload()
-        except (EC2ResponseError, ValueError):
+        except ClientError:
             # The instance no longer exists and cannot be refreshed.
             # set the status to unknown
             self._ec2_instance.state = {'Name': InstanceState.UNKNOWN}
@@ -594,7 +590,7 @@ class AWSVolume(BaseVolume):
         """
         try:
             self._volume.reload()
-        except (EC2ResponseError, ValueError):
+        except ClientError:
             # The volume no longer exists and cannot be refreshed.
             # set the status to unknown
             self._volume.state = VolumeState.UNKNOWN
@@ -679,7 +675,7 @@ class AWSSnapshot(BaseSnapshot):
         """
         try:
             self._snapshot.reload()
-        except (EC2ResponseError, ValueError):
+        except ClientError:
             # The snapshot no longer exists and cannot be refreshed.
             # set the status to unknown
             self._snapshot.state = SnapshotState.UNKNOWN
@@ -883,59 +879,74 @@ class AWSSecurityGroupRule(BaseSecurityGroupRule):
 
 class AWSBucketObject(BaseBucketObject):
 
-    def __init__(self, provider, key):
+    class BucketObjIterator():
+        CHUNK_SIZE = 4096
+
+        def __init__(self, body):
+            self.body = body
+
+        def __iter__(self):
+            while True:
+                data = self.read(self.CHUNK_SIZE)
+                if data:
+                    yield data
+                else:
+                    break
+
+        def read(self, length):
+            return self.body.read(amt=length)
+
+        def close(self):
+            return self.body.close()
+
+    def __init__(self, provider, obj):
         super(AWSBucketObject, self).__init__(provider)
-        self._key = key
+        self._obj = obj
 
     @property
     def id(self):
-        return self._key.name
+        return self._obj.key
 
     @property
     def name(self):
         """
         Get this object's name.
         """
-        return self._key.name
+        return self.id
 
     @property
     def size(self):
         """
         Get this object's size.
         """
-        return self._key.size
+        return self._obj.size
 
     @property
     def last_modified(self):
         """
         Get the date and time this object was last modified.
         """
-        if self._key.last_modified:
-            lm = datetime.strptime(self._key.last_modified,
-                                   "%Y-%m-%dT%H:%M:%S.%fZ")
-            return lm.strftime("%Y-%m-%dT%H:%M:%S.%f")
-        else:
-            return None
+        return self._obj.last_modified.strftime("%Y-%m-%dT%H:%M:%S.%f")
 
     def iter_content(self):
         """
         Returns this object's content as an
         iterable.
         """
-        return self._key
+        return self.BucketObjIterator(self._obj.get().get('Body'))
 
     def upload(self, data):
         """
         Set the contents of this object to the data read from the source
         string.
         """
-        self._key.set_contents_from_string(data)
+        self._obj.put(Body=data)
 
     def upload_from_file(self, path):
         """
         Store the contents of the file pointed by the "path" variable.
         """
-        self._key.set_contents_from_filename(path)
+        self._obj.upload_file(path)
 
     def delete(self):
         """
@@ -944,13 +955,16 @@ class AWSBucketObject(BaseBucketObject):
         :rtype: bool
         :return: True if successful
         """
-        self._key.delete()
+        self._obj.delete()
 
     def generate_url(self, expires_in=0):
         """
         Generate a URL to this object.
         """
-        return self._key.generate_url(expires_in=expires_in)
+        return self._provider.s3_conn.meta.client.generate_presigned_url(
+            'get_object',
+            Params={'Bucket': self._obj.bucket_name, 'Key': self.id},
+            ExpiresIn=expires_in)
 
 
 class AWSBucket(BaseBucket):
@@ -974,10 +988,13 @@ class AWSBucket(BaseBucket):
         """
         Retrieve a given object from this bucket.
         """
-        key = Key(self._bucket, name)
-        if key and key.exists():
-            return AWSBucketObject(self._provider, key)
-        return None
+        try:
+            obj = self._bucket.Object(name)
+            # load() raises a ClientError if the object does not exist
+            obj.load()
+            return AWSBucketObject(self._provider, obj)
+        except ClientError:
+            return None
 
     def list(self, limit=None, marker=None, prefix=None):
         """
@@ -986,8 +1003,12 @@ class AWSBucket(BaseBucket):
         :rtype: BucketObject
         :return: List of all available BucketObjects within this bucket.
         """
+        if prefix:
+            boto_objs = self._bucket.objects.filter(Prefix=prefix)
+        else:
+            boto_objs = self._bucket.objects.all()
         objects = [AWSBucketObject(self._provider, obj)
-                   for obj in self._bucket.list(prefix=prefix)]
+                   for obj in boto_objs]
 
         return ClientPagedResultList(self._provider, objects,
                                      limit=limit, marker=marker)
@@ -1005,8 +1026,8 @@ class AWSBucket(BaseBucket):
         self._bucket.delete()
 
     def create_object(self, name):
-        key = Key(self._bucket, name)
-        return AWSBucketObject(self._provider, key)
+        obj = self._bucket.Object(name)
+        return AWSBucketObject(self._provider, obj)
 
 
 class AWSRegion(BaseRegion):
@@ -1112,7 +1133,7 @@ class AWSNetwork(BaseNetwork):
         """
         try:
             self._vpc.reload()
-        except (EC2ResponseError, ValueError):
+        except ClientError:
             # The network no longer exists and cannot be refreshed.
             # set the status to unknown
             self._vpc.state = NetworkState.UNKNOWN
@@ -1253,7 +1274,7 @@ class AWSRouter(BaseRouter):
     def refresh(self):
         try:
             self._route_table.reload()
-        except (EC2ResponseError, ValueError):
+        except ClientError:
             self._route_table.associations = None
 
     @property
@@ -1325,7 +1346,7 @@ class AWSInternetGateway(BaseInternetGateway):
     def refresh(self):
         try:
             self._gateway.reload()
-        except (EC2ResponseError, ValueError):
+        except ClientError:
             self._gateway.state = GatewayState.UNKNOWN
 
     @property

+ 48 - 30
cloudbridge/cloud/providers/aws/services.py

@@ -55,7 +55,7 @@ from .resources import AWSVolume
 # cb.set_stream_logger(__name__)
 
 
-class EC2ServiceFilter(object):
+class GenericServiceFilter(object):
     '''
     Generic AWS EC2 service filter interface
 
@@ -63,9 +63,10 @@ class EC2ServiceFilter(object):
     :param str service: Name of the EC2 service to use
     :param BaseCloudResource cb_iface: CloudBridge class to use
     '''
-    def __init__(self, provider, service, cb_iface):
+    def __init__(self, provider, boto_conn, service, cb_iface):
         self.provider = provider
-        self.service = getattr(self.provider.ec2_conn, service)
+        self.boto_conn = boto_conn
+        self.service = getattr(self.boto_conn, service)
         self.iface = cb_iface
 
     def get(self, val, filter_name, wrapper=True):
@@ -128,7 +129,7 @@ class EC2ServiceFilter(object):
         :param object kwargs: Arguments to be passed as-is to
             the service method
         '''
-        res = getattr(self.provider.ec2_conn, method)(**kwargs)
+        res = getattr(self.boto_conn, method)(**kwargs)
         if isinstance(res, list):
             return [self.iface(self.provider, x) if x else None for x in res]
         return self.iface(self.provider, res) if res else None
@@ -151,6 +152,32 @@ class EC2ServiceFilter(object):
         return True
 
 
+class EC2ServiceFilter(GenericServiceFilter):
+    '''
+    Generic AWS EC2 service filter interface
+
+    :param AWSCloudProvider provider: AWS EC2 provider interface
+    :param str service: Name of the EC2 service to use
+    :param BaseCloudResource cb_iface: CloudBridge class to use
+    '''
+    def __init__(self, provider, service, cb_iface):
+        super(EC2ServiceFilter, self).__init__(
+            provider, provider.ec2_conn, service, cb_iface)
+
+
+class S3ServiceFilter(GenericServiceFilter):
+    '''
+    Generic AWS S3 service filter interface
+
+    :param AWSCloudProvider provider: AWS provider interface
+    :param str service: Name of the S3 service to use
+    :param BaseCloudResource cb_iface: CloudBridge class to use
+    '''
+    def __init__(self, provider, service, cb_iface):
+        super(S3ServiceFilter, self).__init__(
+            provider, provider.s3_conn, service, cb_iface)
+
+
 class AWSSecurityService(BaseSecurityService):
 
     def __init__(self, provider):
@@ -313,6 +340,7 @@ class AWSObjectStoreService(BaseObjectStoreService):
 
     def __init__(self, provider):
         super(AWSObjectStoreService, self).__init__(provider)
+        self.iface = S3ServiceFilter(self.provider, 'buckets', AWSBucket)
 
     def get(self, bucket_id):
         """
@@ -320,12 +348,12 @@ class AWSObjectStoreService(BaseObjectStoreService):
         does not exist.
         """
         try:
-            # Make a call to make sure the bucket exists. While this would
-            # normally return a Bucket instance, there's an edge case where a
-            # 403 response can occur when the bucket exists but the
+            # Make a call to make sure the bucket exists. There's an edge case
+            # where a 403 response can occur when the bucket exists but the
             # user simply does not have permissions to access it. See below.
-            bucket = self.provider.s3_conn.get_bucket(bucket_id)
-            return AWSBucket(self.provider, bucket)
+            self.provider.s3_conn.meta.client.head_bucket(Bucket=bucket_id)
+            return AWSBucket(self.provider,
+                             self.provider.s3_conn.Bucket(bucket_id))
         except ClientError as e:
             # If 403, it means the bucket exists, but the user does not have
             # permissions to access the bucket. However, limited operations
@@ -333,7 +361,7 @@ class AWSObjectStoreService(BaseObjectStoreService):
             # Bucket instance to allow further operations.
             # http://stackoverflow.com/questions/32331456/using-boto-upload-file-to-s3-
             # sub-folder-when-i-have-no-permissions-on-listing-fo
-            if e.status == 403:
+            if e.response['Error']['Code'] == 403:
                 bucket = self.provider.s3_conn.get_bucket(bucket_id,
                                                           validate=False)
                 return AWSBucket(self.provider, bucket)
@@ -341,34 +369,24 @@ class AWSObjectStoreService(BaseObjectStoreService):
         return None
 
     def find(self, name, limit=None, marker=None):
-        """
-        Searches for a bucket by a given list of attributes.
-        """
-        buckets = [AWSBucket(self.provider, bucket)
-                   for bucket in self.provider.s3_conn.get_all_buckets()
-                   if name in bucket.name]
+        buckets = [bucket
+                   for bucket in self
+                   if name == bucket.name]
         return ClientPagedResultList(self.provider, buckets,
                                      limit=limit, marker=marker)
 
     def list(self, limit=None, marker=None):
-        """
-        List all containers.
-        """
-        buckets = [AWSBucket(self.provider, bucket)
-                   for bucket in self.provider.s3_conn.get_all_buckets()]
-        return ClientPagedResultList(self.provider, buckets,
-                                     limit=limit, marker=marker)
+        return self.iface.list(limit=limit, marker=marker)
 
     def create(self, name, location=None):
-        """
-        Create a new bucket.
-        """
         AWSBucket.assert_valid_resource_name(name)
 
-        bucket = self.provider.s3_conn.create_bucket(
-            name,
-            location=location if location else '')
-        return AWSBucket(self.provider, bucket)
+        self.iface.create(
+            'create_bucket', Bucket=name,
+            CreateBucketConfiguration={
+                'LocationConstraint': location or self.provider.region_name
+            })
+        return self.get(name)
 
 
 class AWSImageService(BaseImageService):

+ 1 - 1
requirements.txt

@@ -1,2 +1,2 @@
-pydevd
+git+git://github.com/spulec/moto
 -e ".[dev]"

+ 4 - 1
test/test_object_store_service.py

@@ -10,6 +10,7 @@ from test.helpers import ProviderTestBase
 from test.helpers import standard_interface_tests as sit
 from unittest import skip
 
+from cloudbridge.cloud.factory import ProviderList
 from cloudbridge.cloud.interfaces.exceptions import InvalidNameException
 from cloudbridge.cloud.interfaces.resources import Bucket
 from cloudbridge.cloud.interfaces.resources import BucketObject
@@ -153,9 +154,11 @@ class CloudObjectStoreServiceTestCase(ProviderTestBase):
                     target_stream2.write(data)
                 self.assertEqual(target_stream2.getvalue(), content)
 
-    @skip("Skip until OpenStack implementation is provided")
     @helpers.skipIfNoService(['object_store'])
     def test_generate_url(self):
+        if self.provider.PROVIDER_ID == ProviderList.OPENSTACK:
+            raise self.skipTest("Skip until OpenStack impl is provided")
+
         name = "cbtestbucketobjs-{0}".format(uuid.uuid4())
         test_bucket = self.provider.object_store.create(name)