Alessandro Pilotti 10 ani în urmă
comite
3aa688b3eb
45 a modificat fișierele cu 2935 adăugiri și 0 ștergeri
  1. 39 0
      .gitignore
  2. 0 0
      coriolis/__init__.py
  3. 130 0
      coriolis/api/__init__.py
  4. 5 0
      coriolis/api/v1/__init__.py
  5. 58 0
      coriolis/api/v1/migrations.py
  6. 33 0
      coriolis/api/v1/router.py
  7. 5 0
      coriolis/api/v1/views/__init__.py
  8. 17 0
      coriolis/api/v1/views/migration_view.py
  9. 1268 0
      coriolis/api/wsgi.py
  10. 28 0
      coriolis/cmd/api.py
  11. 31 0
      coriolis/cmd/conductor.py
  12. 31 0
      coriolis/cmd/worker.py
  13. 0 0
      coriolis/conductor/__init__.py
  14. 0 0
      coriolis/conductor/rpc/__init__.py
  15. 39 0
      coriolis/conductor/rpc/client.py
  16. 94 0
      coriolis/conductor/rpc/server.py
  17. 13 0
      coriolis/constants.py
  18. 16 0
      coriolis/context.py
  19. 0 0
      coriolis/db/__init__.py
  20. 75 0
      coriolis/db/api.py
  21. 0 0
      coriolis/db/sqlalchemy/__init__.py
  22. 49 0
      coriolis/db/sqlalchemy/api.py
  23. 0 0
      coriolis/db/sqlalchemy/migrate_repo/__init__.py
  24. 5 0
      coriolis/db/sqlalchemy/migrate_repo/manage.py
  25. 25 0
      coriolis/db/sqlalchemy/migrate_repo/migrate.cfg
  26. 55 0
      coriolis/db/sqlalchemy/migrate_repo/versions/001_initial.py
  27. 0 0
      coriolis/db/sqlalchemy/migrate_repo/versions/__init__.py
  28. 24 0
      coriolis/db/sqlalchemy/migration.py
  29. 36 0
      coriolis/db/sqlalchemy/models.py
  30. 231 0
      coriolis/exception.py
  31. 50 0
      coriolis/i18n.py
  32. 20 0
      coriolis/migrations/api.py
  33. 0 0
      coriolis/providers/__init__.py
  34. 10 0
      coriolis/providers/base.py
  35. 24 0
      coriolis/providers/factory.py
  36. 10 0
      coriolis/providers/openstack/__init__.py
  37. 249 0
      coriolis/providers/vmware_vsphere/__init__.py
  38. 132 0
      coriolis/service.py
  39. 16 0
      coriolis/utils.py
  40. 0 0
      coriolis/worker/__init__.py
  41. 0 0
      coriolis/worker/rpc/__init__.py
  42. 32 0
      coriolis/worker/rpc/client.py
  43. 76 0
      coriolis/worker/rpc/server.py
  44. 5 0
      etc/coriolis/api-paste.ini
  45. 4 0
      etc/coriolis/coriolis.conf

+ 39 - 0
.gitignore

@@ -0,0 +1,39 @@
+*.DS_Store
+*.egg*
+*.log
+*.mo
+*.pyc
+*.swo
+*.swp
+*.orig
+*.sqlite
+.autogenerated
+.coverage
+.idea
+.nova-venv
+.project
+.pydevproject
+.ropeproject
+.tox
+.venv
+AUTHORS
+Authors
+build-stamp
+build/*
+CA/
+ChangeLog
+coverage.xml
+cover/*
+covhtml
+dist/*
+doc/source/api/*
+doc/build/*
+instances
+keeper
+keys
+local_settings.py
+MANIFEST
+nosetests.xml
+nova/tests/cover/*
+nova/vcsversion.py
+tools/conf/nova.conf*

+ 0 - 0
coriolis/__init__.py


+ 130 - 0
coriolis/api/__init__.py

@@ -0,0 +1,130 @@
+# Copyright (c) 2013 OpenStack Foundation
+#
+# All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+"""
+WSGI middleware for OpenStack API controllers.
+"""
+
+from oslo_log import log as logging
+from oslo_service import wsgi as base_wsgi
+import routes
+
+from coriolis.api import wsgi
+from coriolis.i18n import _, _LW
+
+
+LOG = logging.getLogger(__name__)
+
+
class APIMapper(routes.Mapper):
    """Mapper that handles empty URLs and restricts format extensions."""

    def routematch(self, url=None, environ=None):
        """Match *url*, special-casing the empty string.

        routes.Mapper does not match "" directly, so it is routed
        through the internal _match helper.
        """
        # Bug fix: the original used `url is ""`, an identity test that
        # only works by accident of CPython string interning (and is a
        # SyntaxWarning on modern Python). Use equality instead.
        if url == "":
            result = self._match("", environ)
            return result[0], result[1]
        return routes.Mapper.routematch(self, url, environ)

    def connect(self, *args, **kwargs):
        # NOTE(inhye): Default the format part of a route to only accept json
        #             and xml so it doesn't eat all characters after a '.'
        #             in the url.
        kwargs.setdefault('requirements', {})
        if not kwargs['requirements'].get('format'):
            kwargs['requirements']['format'] = 'json|xml'
        return routes.Mapper.connect(self, *args, **kwargs)
+
+
class ProjectMapper(APIMapper):
    """APIMapper that prefixes every resource path with the project id."""

    def resource(self, member_name, collection_name, **kwargs):
        """Register a resource, scoping its path under {project_id}."""
        if 'parent_resource' not in kwargs:
            kwargs['path_prefix'] = '{project_id}/'
        else:
            parent = kwargs['parent_resource']
            # Nested resources live under their parent's collection/id.
            kwargs['path_prefix'] = '{project_id}/%s/:%s_id' % (
                parent['collection_name'], parent['member_name'])
        routes.Mapper.resource(self, member_name, collection_name, **kwargs)
+
+
class APIRouter(base_wsgi.Router):
    """Routes requests on the API to the appropriate controller and method."""

    # Subclasses must override this with a concrete extension manager class.
    ExtensionManager = None  # override in subclasses

    @classmethod
    def factory(cls, global_config, **local_config):
        """Simple paste factory, :class:`cinder.wsgi.Router` doesn't have."""
        return cls()

    def __init__(self, ext_mgr=None):
        """Build the mapper: core routes, then extension routes/actions."""
        if ext_mgr is None:
            if not self.ExtensionManager:
                raise Exception(_("Must specify an ExtensionManager class"))
            ext_mgr = self.ExtensionManager()

        mapper = ProjectMapper()
        self.resources = {}
        # Order matters: core routes first, then extension resources,
        # then controller extensions, before handing the finished mapper
        # to the base Router.
        self._setup_routes(mapper, ext_mgr)
        self._setup_ext_routes(mapper, ext_mgr)
        self._setup_extensions(ext_mgr)
        super(APIRouter, self).__init__(mapper)

    def _setup_ext_routes(self, mapper, ext_mgr):
        """Register every extension-provided resource with the mapper."""
        for resource in ext_mgr.get_resources():
            LOG.debug('Extended resource: %s',
                      resource.collection)

            wsgi_resource = wsgi.Resource(resource.controller)
            self.resources[resource.collection] = wsgi_resource

            resource_kwargs = {
                'controller': wsgi_resource,
                'collection': resource.collection_actions,
                'member': resource.member_actions,
            }
            if resource.parent:
                resource_kwargs['parent_resource'] = resource.parent

            mapper.resource(resource.collection, resource.collection,
                            **resource_kwargs)

            if resource.custom_routes_fn:
                resource.custom_routes_fn(mapper, wsgi_resource)

    def _setup_extensions(self, ext_mgr):
        """Attach controller extensions to already-registered resources."""
        for extension in ext_mgr.get_controller_extensions():
            collection = extension.collection
            controller = extension.controller

            # Extensions can only extend resources that actually exist.
            if collection not in self.resources:
                LOG.warning(_LW('Extension %(ext_name)s: Cannot extend '
                                'resource %(collection)s: No such resource'),
                            {'ext_name': extension.extension.name,
                             'collection': collection})
                continue

            LOG.debug('Extension %(ext_name)s extending resource: '
                      '%(collection)s',
                      {'ext_name': extension.extension.name,
                       'collection': collection})

            resource = self.resources[collection]
            resource.register_actions(controller)
            resource.register_extensions(controller)

    def _setup_routes(self, mapper, ext_mgr):
        """Install the core routes; must be implemented by subclasses."""
        raise NotImplementedError

+ 5 - 0
coriolis/api/v1/__init__.py

@@ -0,0 +1,5 @@
import paste.urlmap


def root_app_factory(loader, global_conf, **local_conf):
    """Paste composite factory delegating URL mapping to paste.urlmap."""
    urlmap_factory = paste.urlmap.urlmap_factory
    return urlmap_factory(loader, global_conf, **local_conf)

+ 58 - 0
coriolis/api/v1/migrations.py

@@ -0,0 +1,58 @@
+from oslo_service import wsgi
+
+from coriolis.api import wsgi as api_wsgi
+from coriolis.api.v1.views import migration_view
+from coriolis import constants
+from coriolis.migrations import api
+from coriolis.providers import factory
+
+
class MigrationController(object):
    """API controller exposing CRUD operations on migrations."""

    def __init__(self):
        self._migration_api = api.API()
        super(MigrationController, self).__init__()

    def show(self, req, id):
        """Return the formatted view of a single migration by id."""
        return migration_view.format_migration(
            req, self._migration_api.get_migration(id))

    def index(self, req):
        """Return the list of all migrations."""
        return migration_view.collection(
            req, self._migration_api.get_migrations())

    def detail(self, req):
        """Return the detailed list of migrations.

        NOTE(review): currently identical to index().
        """
        return migration_view.collection(
            req, self._migration_api.get_migrations())

    @staticmethod
    def _validate_endpoint(endpoint, provider_type):
        """Validate one endpoint's connection info via its provider.

        :param endpoint: dict with "type" and "connection_info" keys
        :param provider_type: PROVIDER_TYPE_EXPORT or PROVIDER_TYPE_IMPORT
        :raises Exception: if the connection info is rejected
        """
        provider = factory.get_provider(endpoint["type"], provider_type)
        if not provider.validate_connection_info(
                endpoint["connection_info"]):
            # TODO: use a decent exception
            raise Exception("Invalid connection info")

    def _validate_create_body(self, body):
        """Validate a create request body.

        Returns (origin, destination, instances) after checking both
        endpoints' connection info. Raises KeyError on missing fields.
        """
        migration = body["migration"]
        origin = migration["origin"]
        destination = migration["destination"]

        # Same validation on both sides; only the provider type differs.
        self._validate_endpoint(origin, constants.PROVIDER_TYPE_EXPORT)
        self._validate_endpoint(destination, constants.PROVIDER_TYPE_IMPORT)

        return origin, destination, migration["instances"]

    def create(self, req, body):
        """Validate and start a new migration."""
        origin, destination, instances = self._validate_create_body(body)
        self._migration_api.start(origin, destination, instances)

    def delete(self, req, id):
        # NOTE(review): placeholder implementation — nothing is deleted yet.
        print("Delete: %s" % id)
+
+
def create_resource():
    """Build the WSGI resource wrapping the migration controller."""
    controller = MigrationController()
    return api_wsgi.Resource(controller)

+ 33 - 0
coriolis/api/v1/router.py

@@ -0,0 +1,33 @@
+from oslo_log import log as logging
+from oslo_service import wsgi
+import routes
+
+import webob.dec
+import webob.exc
+
+from coriolis import api
+from coriolis.i18n import _LI, _LE
+from coriolis.api.v1 import migrations
+
+LOG = logging.getLogger(__name__)
+
+
class ExtensionManager(object):
    """Minimal extension manager: the v1 API ships no extensions."""

    def get_resources(self):
        """Return the extension resources (none for v1)."""
        return []

    def get_controller_extensions(self):
        """Return the controller extensions (none for v1)."""
        return []
+
+
class APIRouter(api.APIRouter):
    """v1 API router wiring up the migration endpoints."""

    ExtensionManager = ExtensionManager

    def _setup_routes(self, mapper, ext_mgr):
        """Install the core v1 routes on *mapper*."""
        mapper.redirect("", "/")

        migrations_resource = migrations.create_resource()
        self.resources['migrations'] = migrations_resource
        # 'detail' is a GET on the collection; 'action' is a POST on a member.
        mapper.resource('migration', 'migrations',
                        controller=migrations_resource,
                        collection={'detail': 'GET'},
                        member={'action': 'POST'})

+ 5 - 0
coriolis/api/v1/views/__init__.py

@@ -0,0 +1,5 @@
import paste.urlmap


def root_app_factory(loader, global_conf, **local_conf):
    """Paste composite factory; defers to paste.urlmap for routing."""
    return paste.urlmap.urlmap_factory(
        loader, global_conf, **local_conf)

+ 17 - 0
coriolis/api/v1/views/migration_view.py

@@ -0,0 +1,17 @@
+import itertools
+
+
def format_migration(req, migration, keys=None):
    """Build a dict view of *migration*, optionally limited to *keys*.

    A falsy *keys* (None or empty) keeps every item.
    """
    return {field: value for field, value in migration.items()
            if not keys or field in keys}
+
+
def collection(req, migrations):
    """Build the standard collection view over *migrations*."""
    return {'migrations': [format_migration(req, item)
                           for item in migrations]}

+ 1268 - 0
coriolis/api/wsgi.py

@@ -0,0 +1,1268 @@
+# Copyright 2011 OpenStack Foundation
+# Copyright 2013 IBM Corp.
+# All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import inspect
+import math
+import time
+
+from oslo_log import log as logging
+from oslo_log import versionutils
+from oslo_serialization import jsonutils
+from oslo_utils import excutils
+import six
+import webob
+
+from coriolis import exception
+from coriolis import i18n
+from coriolis.i18n import _, _LE, _LI
+
+
+LOG = logging.getLogger(__name__)
+
+SUPPORTED_CONTENT_TYPES = (
+    'application/json',
+)
+
+_MEDIA_TYPE_MAP = {
+    'application/json': 'json',
+}
+
+
class Application(object):
    """Base WSGI application wrapper. Subclasses need to implement __call__."""

    @classmethod
    def factory(cls, global_config, **local_config):
        """Paste app factory.

        Values from the app's section of the paste config arrive as
        *local_config* and are forwarded to the constructor as keyword
        arguments, so subclasses rarely need to re-implement this:

            [app:wadl]
            latest_version = 1.3
            paste.app_factory = cinder.api.fancy_api:Wadl.factory

        is equivalent to calling ``Wadl(latest_version='1.3')``.
        """
        return cls(**local_config)

    def __call__(self, environ, start_response):
        """WSGI entry point; subclasses must override.

        Implementations typically decorate themselves with
        ``@webob.dec.wsgify(RequestClass=Request)`` and may return a plain
        string, a ``webob.exc`` HTTP exception, a ``webob`` Response, or
        another WSGI application to run next (or set ``req.response`` and
        return None). See http://pythonpaste.org/webob/modules/dec.html.
        """
        raise NotImplementedError(_('You must implement __call__'))
+
+
class Request(webob.Request):
    """Add some OpenStack API-specific logic to the base webob.Request."""

    def __init__(self, *args, **kwargs):
        super(Request, self).__init__(*args, **kwargs)
        # Per-request store shaped {resource name: {id_attribute: resource}}.
        self._resource_cache = {}

    def cache_resource(self, resource_to_cache, id_attribute='id', name=None):
        """Cache the given resource.

        Allow API methods to cache objects, such as results from a DB query,
        to be used by API extensions within the same API request.

        The resource_to_cache can be a list or an individual resource,
        but ultimately resources are cached individually using the given
        id_attribute.

        Different resources types might need to be cached during the same
        request, they can be cached using the name parameter. For example:

            Controller 1:
                request.cache_resource(db_volumes, 'volumes')
                request.cache_resource(db_volume_types, 'types')
            Controller 2:
                db_volumes = request.cached_resource('volumes')
                db_type_1 = request.cached_resource_by_id('1', 'types')

        If no name is given, a default name will be used for the resource.

        An instance of this class only lives for the lifetime of a
        single API request, so there's no need to implement full
        cache management.
        """
        if not isinstance(resource_to_cache, list):
            resource_to_cache = [resource_to_cache]
        if not name:
            # The request path is the default cache name.
            name = self.path
        cached_resources = self._resource_cache.setdefault(name, {})
        for resource in resource_to_cache:
            cached_resources[resource[id_attribute]] = resource

    def cached_resource(self, name=None):
        """Get the cached resources cached under the given resource name.

        Allow an API extension to get previously stored objects within
        the same API request.

        Note that the object data will be slightly stale.

        :returns: a dict of id_attribute to the resource from the cached
                  resources, an empty map if an empty collection was cached,
                  or None if nothing has been cached yet under this name
        """
        if not name:
            name = self.path
        if name not in self._resource_cache:
            # Nothing has been cached for this key yet
            return None
        return self._resource_cache[name]

    def cached_resource_by_id(self, resource_id, name=None):
        """Get a resource by ID cached under the given resource name.

        Allow an API extension to get a previously stored object
        within the same API request. This is basically a convenience method
        to lookup by ID on the dictionary of all cached resources.

        Note that the object data will be slightly stale.

        :returns: the cached resource or None if the item is not in the cache
        """
        resources = self.cached_resource(name)
        if not resources:
            # Nothing has been cached for this key yet
            return None
        return resources.get(resource_id)

    def cache_db_items(self, key, items, item_key='id'):
        """Cache database items for reuse within this API request.

        Allow API methods to store objects from a DB query to be
        used by API extensions within the same API request.

        An instance of this class only lives for the lifetime of a
        single API request, so there's no need to implement full
        cache management.
        """
        self.cache_resource(items, item_key, key)

    def get_db_items(self, key):
        """Get database items.

        Allow an API extension to get previously stored objects within
        the same API request.

        Note that the object data will be slightly stale.
        """
        return self.cached_resource(key)

    def get_db_item(self, key, item_key):
        """Get database item.

        Allow an API extension to get a previously stored object
        within the same API request.

        Note that the object data will be slightly stale.
        """
        return self.get_db_items(key).get(item_key)

    # The cache_db_*/get_db_* helpers below are typed wrappers around
    # cache_db_items/get_db_item(s) for the resource kinds this API serves.

    def cache_db_volumes(self, volumes):
        # NOTE(mgagne) Cache it twice for backward compatibility reasons
        self.cache_db_items('volumes', volumes, 'id')
        self.cache_db_items(self.path, volumes, 'id')

    def cache_db_volume(self, volume):
        # NOTE(mgagne) Cache it twice for backward compatibility reasons
        self.cache_db_items('volumes', [volume], 'id')
        self.cache_db_items(self.path, [volume], 'id')

    def get_db_volumes(self):
        # Check the legacy 'volumes' key first, then the per-path cache.
        return (self.get_db_items('volumes') or
                self.get_db_items(self.path))

    def get_db_volume(self, volume_id):
        return (self.get_db_item('volumes', volume_id) or
                self.get_db_item(self.path, volume_id))

    def cache_db_volume_types(self, volume_types):
        self.cache_db_items('volume_types', volume_types, 'id')

    def cache_db_volume_type(self, volume_type):
        self.cache_db_items('volume_types', [volume_type], 'id')

    def get_db_volume_types(self):
        return self.get_db_items('volume_types')

    def get_db_volume_type(self, volume_type_id):
        return self.get_db_item('volume_types', volume_type_id)

    def cache_db_snapshots(self, snapshots):
        self.cache_db_items('snapshots', snapshots, 'id')

    def cache_db_snapshot(self, snapshot):
        self.cache_db_items('snapshots', [snapshot], 'id')

    def get_db_snapshots(self):
        return self.get_db_items('snapshots')

    def get_db_snapshot(self, snapshot_id):
        return self.get_db_item('snapshots', snapshot_id)

    def cache_db_backups(self, backups):
        self.cache_db_items('backups', backups, 'id')

    def cache_db_backup(self, backup):
        self.cache_db_items('backups', [backup], 'id')

    def get_db_backups(self):
        return self.get_db_items('backups')

    def get_db_backup(self, backup_id):
        return self.get_db_item('backups', backup_id)

    def best_match_content_type(self):
        """Determine the requested response content-type.

        The result is memoized in the WSGI environ so it is computed at
        most once per request. Falls back to 'application/json'.
        """
        if 'coriolis.best_content_type' not in self.environ:
            # Calculate the best MIME type
            content_type = None

            # Check URL path suffix (e.g. '/resource.json')
            parts = self.path.rsplit('.', 1)
            if len(parts) > 1:
                possible_type = 'application/' + parts[1]
                if possible_type in SUPPORTED_CONTENT_TYPES:
                    content_type = possible_type

            if not content_type:
                # Fall back to the Accept header negotiation.
                content_type = self.accept.best_match(SUPPORTED_CONTENT_TYPES)

            self.environ['coriolis.best_content_type'] = (content_type or
                                                          'application/json')

        return self.environ['coriolis.best_content_type']

    def get_content_type(self):
        """Determine content type of the request body.

        Does not do any body introspection, only checks header

        :raises InvalidContentType: if the declared type is unsupported
        """
        if "Content-Type" not in self.headers:
            return None

        allowed_types = SUPPORTED_CONTENT_TYPES
        content_type = self.content_type

        if content_type not in allowed_types:
            raise exception.InvalidContentType(content_type=content_type)

        return content_type

    def best_match_language(self):
        """Determines best available locale from the Accept-Language header.

        :returns: the best language match or None if the 'Accept-Language'
                  header was not available in the request.
        """
        if not self.accept_language:
            return None
        all_languages = i18n.get_available_languages()
        return self.accept_language.best_match(all_languages)
+
+
class ActionDispatcher(object):
    """Maps method name to local methods through action name."""

    def dispatch(self, *args, **kwargs):
        """Resolve the 'action' kwarg to a local method and invoke it."""
        action = kwargs.pop('action', 'default')
        # Unknown actions fall back to the default handler.
        handler = getattr(self, str(action), self.default)
        return handler(*args, **kwargs)

    def default(self, data):
        """Fallback handler; subclasses must provide one."""
        raise NotImplementedError()
+
+
class TextDeserializer(ActionDispatcher):
    """Default request body deserialization."""

    def deserialize(self, datastring, action='default'):
        """Route *datastring* to the handler named by *action*."""
        return self.dispatch(datastring, action=action)

    def default(self, datastring):
        # The base deserializer ignores the body entirely.
        return {}
+
+
class JSONDeserializer(TextDeserializer):
    """Deserializer turning JSON request bodies into dicts."""

    def _from_json(self, datastring):
        """Parse *datastring* as JSON; raise MalformedRequestBody on error."""
        try:
            return jsonutils.loads(datastring)
        except ValueError:
            raise exception.MalformedRequestBody(
                reason=_("cannot understand JSON"))

    def default(self, datastring):
        """Wrap the parsed JSON under the conventional 'body' key."""
        return {'body': self._from_json(datastring)}
+
+
class DictSerializer(ActionDispatcher):
    """Default request body serialization."""

    def serialize(self, data, action='default'):
        """Route *data* to the handler named by *action*."""
        return self.dispatch(data, action=action)

    def default(self, data):
        # The base serializer produces an empty body.
        return ""
+
+
class JSONDictSerializer(DictSerializer):
    """Default JSON request body serialization."""

    def default(self, data):
        """Serialize *data* to a JSON string."""
        return jsonutils.dumps(data)
+
+
def serializers(**serializers):
    """Attaches serializers to a method.

    The mapping is stored (merged into any previously attached one) on
    the ``wsgi_serializers`` attribute of the function object itself;
    the method is not wrapped.
    """

    def decorator(func):
        func.__dict__.setdefault('wsgi_serializers', {}).update(serializers)
        return func
    return decorator
+
+
def deserializers(**deserializers):
    """Attaches deserializers to a method.

    The mapping is stored (merged into any previously attached one) on
    the ``wsgi_deserializers`` attribute of the function object itself;
    the method is not wrapped.
    """

    def decorator(func):
        func.__dict__.setdefault(
            'wsgi_deserializers', {}).update(deserializers)
        return func
    return decorator
+
+
def response(code):
    """Attaches an HTTP response code to a method.

    The code is recorded as ``wsgi_code`` directly on the function
    object; the method is not wrapped.
    """

    def decorator(func):
        setattr(func, 'wsgi_code', code)
        return func
    return decorator
+
+
class ResponseObject(object):
    """Bundles a response object with appropriate serializers.

    App methods may return an instance of this class to bind alternate
    serializers with the response object to be serialized.  Its use is
    optional.
    """

    def __init__(self, obj, code=None, **serializers):
        """Bind *obj* with the given serializers.

        Keyword arguments mirror the @serializers() decorator; entries
        given here take precedence over default or method-specific
        serializers at serialization time.
        """
        self.obj = obj
        self.serializers = serializers
        self._default_code = 200
        self._code = code
        self._headers = {}
        self.serializer = None
        self.media_type = None

    # Header access is case-insensitive: names are lower-cased both on
    # storage and on lookup.

    def __getitem__(self, key):
        """Return the header named *key*."""
        return self._headers[key.lower()]

    def __setitem__(self, key, value):
        """Set the header named *key* to *value*."""
        self._headers[key.lower()] = value

    def __delitem__(self, key):
        """Remove the header named *key*."""
        del self._headers[key.lower()]

    def _bind_method_serializers(self, meth_serializers):
        """Merge method-level serializers into this object's set.

        setdefault (rather than update) is used so that serializers
        passed to the constructor keep precedence over method-level ones.

        :param meth_serializers: dict mapping response types to
                                 serializer objects
        """
        for mtype, serializer in meth_serializers.items():
            self.serializers.setdefault(mtype, serializer)

    def get_serializer(self, content_type, default_serializers=None):
        """Return (media type, serializer class) for *content_type*.

        Serializers attached to this object win over the provided
        defaults; if neither has a match, InvalidContentType is raised.
        """
        defaults = default_serializers or {}
        try:
            mtype = _MEDIA_TYPE_MAP.get(content_type, content_type)
            if mtype in self.serializers:
                return mtype, self.serializers[mtype]
            return mtype, defaults[mtype]
        except (KeyError, TypeError):
            raise exception.InvalidContentType(content_type=content_type)

    def preserialize(self, content_type, default_serializers=None):
        """Resolve and instantiate the serializer ahead of serialize().

        Exposes the serializer instance to extensions (e.g. for template
        attachment) before the response is actually rendered.
        """
        mtype, serializer_cls = self.get_serializer(content_type,
                                                    default_serializers)
        self.media_type = mtype
        self.serializer = serializer_cls()

    def attach(self, **kwargs):
        """Attach slave templates to serializers."""
        if self.media_type in kwargs:
            self.serializer.attach(kwargs[self.media_type])

    def serialize(self, request, content_type, default_serializers=None):
        """Serialize the wrapped object into a webob.Response."""
        if self.serializer:
            serializer = self.serializer
        else:
            # Not preserialized; resolve and instantiate on the fly.
            _mtype, serializer_cls = self.get_serializer(
                content_type, default_serializers)
            serializer = serializer_cls()

        response = webob.Response()
        response.status_int = self.code
        for hdr, value in self._headers.items():
            response.headers[hdr] = value
        response.headers['Content-Type'] = content_type
        if self.obj is not None:
            body = serializer.serialize(self.obj)
            # webob requires a bytes body.
            if isinstance(body, six.text_type):
                body = body.encode('utf-8')
            response.body = body

        return response

    @property
    def code(self):
        """The response status code (explicit code or the 200 default)."""
        return self._code or self._default_code

    @property
    def headers(self):
        """A copy of the response headers."""
        return self._headers.copy()
+
+
def action_peek_json(body):
    """Peek into a JSON request body and return the action it names."""
    try:
        decoded = jsonutils.loads(body)
    except ValueError:
        msg = _("cannot understand JSON")
        raise exception.MalformedRequestBody(reason=msg)

    # The body must contain exactly one top-level key: the action name.
    if len(decoded) != 1:
        msg = _("too many body keys")
        raise exception.MalformedRequestBody(reason=msg)

    return next(iter(decoded.keys()))
+
+
class ResourceExceptionHandler(object):
    """Context manager to handle Resource exceptions.

    Used when processing exceptions generated by API implementation
    methods (or their extensions).  Converts most exceptions to Fault
    exceptions, with the appropriate logging.
    """

    def __enter__(self):
        # No setup required; all the work happens in __exit__.
        return None

    def __exit__(self, ex_type, ex_value, ex_traceback):
        # Returning True suppresses the exception; raising here replaces
        # it with an API-friendly Fault. Branch order matters: more
        # specific exception types are checked before generic ones.
        if not ex_value:
            return True

        if isinstance(ex_value, exception.NotAuthorized):
            raise Fault(webob.exc.HTTPForbidden(explanation=ex_value.msg))
        elif isinstance(ex_value, exception.Invalid):
            raise Fault(exception.ConvertedException(
                code=ex_value.code, explanation=ex_value.msg))
        elif isinstance(ex_value, TypeError):
            # TypeError usually means bad routing/arguments; log with the
            # full traceback and report a plain 400 to the client.
            exc_info = (ex_type, ex_value, ex_traceback)
            LOG.error(_LE(
                'Exception handling resource: %s'),
                ex_value, exc_info=exc_info)
            raise Fault(webob.exc.HTTPBadRequest())
        elif isinstance(ex_value, Fault):
            LOG.info(_LI("Fault thrown: %s"), ex_value)
            raise ex_value
        elif isinstance(ex_value, webob.exc.HTTPException):
            LOG.info(_LI("HTTP exception thrown: %s"), ex_value)
            raise Fault(ex_value)

        # We didn't handle the exception; let it propagate unchanged.
        return False
+
+
class Resource(Application):
    """WSGI app that handles (de)serialization and controller dispatch.

    WSGI app that reads routing information supplied by RoutesMiddleware
    and calls the requested action method upon its controller.  All
    controller action methods must accept a 'req' argument, which is the
    incoming wsgi.Request. If the operation is a PUT or POST, the controller
    method must also accept a 'body' argument (the deserialized request body).
    They may raise a webob.exc exception or return a dict, which will be
    serialized by requested content type.

    Exceptions derived from webob.exc.HTTPException will be automatically
    wrapped in Fault() to provide API friendly error responses.
    """

    def __init__(self, controller, action_peek=None, **deserializers):
        """Initialize Resource.

        :param controller: object that implement methods created by routes lib
        :param action_peek: dictionary of routines for peeking into an action
                            request body to determine the desired action
        """

        self.controller = controller

        default_deserializers = dict(json=JSONDeserializer)
        default_deserializers.update(deserializers)

        self.default_deserializers = default_deserializers
        self.default_serializers = dict(json=JSONDictSerializer)

        self.action_peek = dict(json=action_peek_json)
        self.action_peek.update(action_peek or {})

        # Copy over the actions dictionary
        self.wsgi_actions = {}
        if controller:
            self.register_actions(controller)

        # Save a mapping of extensions
        self.wsgi_extensions = {}
        self.wsgi_action_extensions = {}

    def register_actions(self, controller):
        """Registers controller actions with this resource."""

        actions = getattr(controller, 'wsgi_actions', {})
        for key, method_name in actions.items():
            self.wsgi_actions[key] = getattr(controller, method_name)

    def register_extensions(self, controller):
        """Registers controller extensions with this resource."""

        extensions = getattr(controller, 'wsgi_extensions', [])
        for method_name, action_name in extensions:
            # Look up the extending method
            extension = getattr(controller, method_name)

            if action_name:
                # Extending an action...
                if action_name not in self.wsgi_action_extensions:
                    self.wsgi_action_extensions[action_name] = []
                self.wsgi_action_extensions[action_name].append(extension)
            else:
                # Extending a regular method
                if method_name not in self.wsgi_extensions:
                    self.wsgi_extensions[method_name] = []
                self.wsgi_extensions[method_name].append(extension)

    def get_action_args(self, request_environment):
        """Parse dictionary created by routes library."""

        # NOTE(Vek): Check for get_action_args() override in the
        # controller
        if hasattr(self.controller, 'get_action_args'):
            return self.controller.get_action_args(request_environment)

        try:
            args = request_environment['wsgiorg.routing_args'][1].copy()
        except (KeyError, IndexError, AttributeError):
            return {}

        try:
            del args['controller']
        except KeyError:
            pass

        try:
            del args['format']
        except KeyError:
            pass

        return args

    def get_body(self, request):
        """Extract the content type and raw body of the request.

        Returns ``(None, '')`` when the body is empty or the
        Content-Type header is missing or unrecognized.
        """

        if len(request.body) == 0:
            LOG.debug("Empty body provided in request")
            return None, ''

        try:
            content_type = request.get_content_type()
        except exception.InvalidContentType:
            LOG.debug("Unrecognized Content-Type provided in request")
            return None, ''

        if not content_type:
            LOG.debug("No Content-Type provided in request")
            return None, ''

        return content_type, request.body

    def deserialize(self, meth, content_type, body):
        """Deserialize *body* for *content_type*.

        A per-method deserializer (``wsgi_deserializers`` attribute)
        takes precedence over the resource-wide defaults.

        :raises exception.InvalidContentType: if no deserializer is
            registered for the content type
        """
        meth_deserializers = getattr(meth, 'wsgi_deserializers', {})
        try:
            mtype = _MEDIA_TYPE_MAP.get(content_type, content_type)
            if mtype in meth_deserializers:
                deserializer = meth_deserializers[mtype]
            else:
                deserializer = self.default_deserializers[mtype]
        except (KeyError, TypeError):
            raise exception.InvalidContentType(content_type=content_type)

        return deserializer().deserialize(body)

    def pre_process_extensions(self, extensions, request, action_args):
        """Run the pre-processing half of the given extensions.

        Generator-function extensions are advanced up to their first
        ``yield``; plain callables are deferred entirely to
        post-processing.  Returns a tuple of (early response or None,
        iterable of extensions queued for post-processing).
        """
        # List of callables for post-processing extensions
        post = []

        for ext in extensions:
            if inspect.isgeneratorfunction(ext):
                response = None

                # If it's a generator function, the part before the
                # yield is the preprocessing stage
                try:
                    with ResourceExceptionHandler():
                        gen = ext(req=request, **action_args)
                        response = next(gen)
                except Fault as ex:
                    response = ex

                # We had a response...
                if response:
                    return response, []

                # No response, queue up generator for post-processing
                post.append(gen)
            else:
                # Regular functions only perform post-processing
                post.append(ext)

        # Run post-processing in the reverse order
        return None, reversed(post)

    def post_process_extensions(self, extensions, resp_obj, request,
                                action_args):
        """Run the post-processing half of the extensions.

        Returns the first response produced by an extension, or None if
        no extension replaced the response.
        """
        for ext in extensions:
            response = None
            if inspect.isgenerator(ext):
                # If it's a generator, run the second half of
                # processing
                try:
                    with ResourceExceptionHandler():
                        response = ext.send(resp_obj)
                except StopIteration:
                    # Normal exit of generator
                    continue
                except Fault as ex:
                    response = ex
            else:
                # Regular functions get post-processing...
                try:
                    with ResourceExceptionHandler():
                        response = ext(req=request, resp_obj=resp_obj,
                                       **action_args)
                except Fault as ex:
                    response = ex

            # We had a response...
            if response:
                return response

        return None

    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, request):
        """WSGI method that controls (de)serialization and method dispatch."""

        LOG.info(_LI("%(method)s %(url)s"),
                 {"method": request.method,
                  "url": request.url})

        # Identify the action, its arguments, and the requested
        # content type
        action_args = self.get_action_args(request.environ)
        action = action_args.pop('action', None)
        content_type, body = self.get_body(request)
        accept = request.best_match_content_type()

        # NOTE(Vek): Splitting the function up this way allows for
        #            auditing by external tools that wrap the existing
        #            function.  If we try to audit __call__(), we can
        #            run into troubles due to the @webob.dec.wsgify()
        #            decorator.
        return self._process_stack(request, action, action_args,
                                   content_type, body, accept)

    def _process_stack(self, request, action, action_args,
                       content_type, body, accept):
        """Implement the processing stack."""

        # Get the implementing method
        try:
            meth, extensions = self.get_method(request, action,
                                               content_type, body)
        except (AttributeError, TypeError):
            return Fault(webob.exc.HTTPNotFound())
        except KeyError as ex:
            msg = _("There is no such action: %s") % ex.args[0]
            return Fault(webob.exc.HTTPBadRequest(explanation=msg))
        except exception.MalformedRequestBody:
            msg = _("Malformed request body")
            return Fault(webob.exc.HTTPBadRequest(explanation=msg))

        # Now, deserialize the request body...
        try:
            if content_type:
                contents = self.deserialize(meth, content_type, body)
            else:
                contents = {}
        except exception.InvalidContentType:
            msg = _("Unsupported Content-Type")
            return Fault(webob.exc.HTTPBadRequest(explanation=msg))
        except exception.MalformedRequestBody:
            msg = _("Malformed request body")
            return Fault(webob.exc.HTTPBadRequest(explanation=msg))

        # Update the action args
        action_args.update(contents)

        # Reject requests whose URL project id does not match the one in
        # the authenticated request context.
        project_id = action_args.pop("project_id", None)
        context = request.environ.get('coriolis.context')
        if (context and project_id and (project_id != context.project_id)):
            msg = _("Malformed request url")
            return Fault(webob.exc.HTTPBadRequest(explanation=msg))

        # Run pre-processing extensions
        response, post = self.pre_process_extensions(extensions,
                                                     request, action_args)

        if not response:
            try:
                with ResourceExceptionHandler():
                    action_result = self.dispatch(meth, request, action_args)
            except Fault as ex:
                response = ex

        if not response:
            # No exceptions; convert action_result into a
            # ResponseObject
            resp_obj = None
            # NOTE: exact-type check on dict is deliberate here
            # (presumably to exclude dict subclasses) -- confirm before
            # changing to isinstance().
            if type(action_result) is dict or action_result is None:
                resp_obj = ResponseObject(action_result)
            elif isinstance(action_result, ResponseObject):
                resp_obj = action_result
            else:
                response = action_result

            # Run post-processing extensions
            if resp_obj:
                _set_request_id_header(request, resp_obj)
                # Do a preserialize to set up the response object
                serializers = getattr(meth, 'wsgi_serializers', {})
                resp_obj._bind_method_serializers(serializers)
                if hasattr(meth, 'wsgi_code'):
                    resp_obj._default_code = meth.wsgi_code
                resp_obj.preserialize(accept, self.default_serializers)

                # Process post-processing extensions
                response = self.post_process_extensions(post, resp_obj,
                                                        request, action_args)

            if resp_obj and not response:
                response = resp_obj.serialize(request, accept,
                                              self.default_serializers)

        try:
            msg_dict = dict(url=request.url, status=response.status_int)
            msg = _LI("%(url)s returned with HTTP %(status)d")
        except AttributeError as e:
            msg_dict = dict(url=request.url, e=e)
            msg = _LI("%(url)s returned a fault: %(e)s")

        LOG.info(msg, msg_dict)

        return response

    def get_method(self, request, action, content_type, body):
        """Look up the action-specific method and its extensions."""

        # Look up the method
        try:
            if not self.controller:
                meth = getattr(self, action)
            else:
                meth = getattr(self.controller, action)
        except AttributeError as e:
            with excutils.save_and_reraise_exception(e) as ctxt:
                if (not self.wsgi_actions or action not in ['action',
                                                            'create',
                                                            'delete',
                                                            'update']):
                    LOG.exception(_LE('Get method error.'))
                else:
                    ctxt.reraise = False
        else:
            return meth, self.wsgi_extensions.get(action, [])

        if action == 'action':
            # OK, it's an action; figure out which action...
            mtype = _MEDIA_TYPE_MAP.get(content_type)
            action_name = self.action_peek[mtype](body)
            LOG.debug("Action body: %s", body)
        else:
            action_name = action

        # Look up the action method
        return (self.wsgi_actions[action_name],
                self.wsgi_action_extensions.get(action_name, []))

    def dispatch(self, method, request, action_args):
        """Dispatch a call to the action-specific method."""

        return method(req=request, **action_args)
+
+
def action(name):
    """Mark a function as an action handler for the given action name.

    The name is used as the action key looked up in the request body.
    It is also overloaded to let extensions provide non-extending
    definitions of create and delete operations.
    """

    def _mark(func):
        func.wsgi_action = name
        return func

    return _mark
+
+
def extends(*args, **kwargs):
    """Indicate a function extends an operation.

    Can be used as either::

        @extends
        def index(...):
            pass

    or as::

        @extends(action='resize')
        def _action_resize(...):
            pass
    """

    def _record(func):
        # Store enough information to find what we're extending
        func.wsgi_extends = (func.__name__, kwargs.get('action'))
        return func

    if args:
        # Called as a bare decorator: apply immediately.
        return _record(*args)

    # Called with arguments: hand back the decorator.
    return _record
+
+
class ControllerMetaclass(type):
    """Controller metaclass.

    Automates assembling the ``wsgi_actions`` dictionary (action key ->
    method name) and the ``wsgi_extensions`` list for every class it
    creates.
    """

    def __new__(mcs, name, bases, cls_dict):
        """Add the wsgi_actions dictionary to the class."""

        actions = {}
        extensions = []
        # Seed with registrations inherited from base classes so that
        # this class's own declarations take precedence.
        for base in bases:
            actions.update(getattr(base, 'wsgi_actions', {}))

        for attr_name, attr in cls_dict.items():
            if not callable(attr):
                continue
            action_key = getattr(attr, 'wsgi_action', None)
            if action_key:
                actions[action_key] = attr_name
            elif getattr(attr, 'wsgi_extends', None):
                extensions.append(attr.wsgi_extends)

        # Publish the collected registrations on the new class.
        cls_dict['wsgi_actions'] = actions
        cls_dict['wsgi_extensions'] = extensions

        return super(ControllerMetaclass, mcs).__new__(
            mcs, name, bases, cls_dict)
+
+
@six.add_metaclass(ControllerMetaclass)
class Controller(object):
    """Default controller."""

    _view_builder_class = None

    def __init__(self, view_builder=None):
        """Initialize controller with a view builder instance."""
        if view_builder:
            self._view_builder = view_builder
            return
        if self._view_builder_class:
            self._view_builder = self._view_builder_class()
            return
        self._view_builder = None

    @staticmethod
    def is_valid_body(body, entity_name):
        """Return True if *body* maps *entity_name* to a dict-like value."""
        if not body or entity_name not in body:
            return False

        # Duck-type check: anything exposing .get() is accepted as a dict.
        candidate = body[entity_name]
        try:
            candidate.get(None)
        except AttributeError:
            return False

        return True

    @staticmethod
    def assert_valid_body(body, entity_name):
        # NOTE: After v1 api is deprecated need to merge 'is_valid_body' and
        #       'assert_valid_body' in to one method. Right now it is not
        #       possible to modify 'is_valid_body' to raise exception because
        #       in case of V1 api when 'is_valid_body' return False,
        #       'HTTPUnprocessableEntity' exception is getting raised and in
        #       V2 api 'HTTPBadRequest' exception is getting raised.
        if Controller.is_valid_body(body, entity_name):
            return
        raise webob.exc.HTTPBadRequest(
            explanation=_("Missing required element '%s' in "
                          "request body.") % entity_name)

    @staticmethod
    def validate_name_and_description(body):
        """Validate the optional 'name' and 'description' body entries."""

        def _validate(value, label):
            try:
                _check_string_length(value, label,
                                     min_length=0, max_length=255)
            except exception.InvalidInput as error:
                raise webob.exc.HTTPBadRequest(explanation=error.msg)

        name = body.get('name')
        if name is not None:
            # Only strings are stripped; non-strings fail the length check.
            if isinstance(name, six.string_types):
                body['name'] = name.strip()
            _validate(body['name'], 'Name')

        description = body.get('description')
        if description is not None:
            _validate(description, 'Description')

    @staticmethod
    def validate_string_length(value, entity_name, min_length=0,
                               max_length=None, remove_whitespaces=False):
        """Check the length of specified string.

        :param value: the value of the string
        :param entity_name: the name of the string
        :param min_length: the min_length of the string
        :param max_length: the max_length of the string
        :param remove_whitespaces: True if trimming whitespaces is needed
                                   else False
        """
        if remove_whitespaces and isinstance(value, six.string_types):
            value = value.strip()
        try:
            _check_string_length(value, entity_name,
                                 min_length=min_length,
                                 max_length=max_length)
        except exception.InvalidInput as error:
            raise webob.exc.HTTPBadRequest(explanation=error.msg)

    @staticmethod
    def validate_integer(value, name, min_value=None, max_value=None):
        """Make sure that value is a valid integer, potentially within range.

        :param value: the value of the integer
        :param name: the name of the integer
        :param min_value: lower bound (inclusive), or None
        :param max_value: upper bound (inclusive), or None
        :returns: integer
        """
        try:
            result = int(value)
        except (TypeError, ValueError, UnicodeEncodeError):
            raise webob.exc.HTTPBadRequest(explanation=(
                _('%s must be an integer.') % name))

        if min_value is not None and result < min_value:
            raise webob.exc.HTTPBadRequest(
                explanation=(_('%(value_name)s must be >= %(min_value)d') %
                             {'value_name': name, 'min_value': min_value}))
        if max_value is not None and result > max_value:
            raise webob.exc.HTTPBadRequest(
                explanation=(_('%(value_name)s must be <= %(max_value)d') %
                             {'value_name': name, 'max_value': max_value}))

        return result
+
+
class Fault(webob.exc.HTTPException):
    """Wrap webob.exc.HTTPException to provide API friendly response."""

    # Maps HTTP status codes to the fault name used as the top-level key
    # of the serialized error body.
    _fault_names = {400: "badRequest",
                    401: "unauthorized",
                    403: "forbidden",
                    404: "itemNotFound",
                    405: "badMethod",
                    409: "conflictingRequest",
                    413: "overLimit",
                    415: "badMediaType",
                    501: "notImplemented",
                    503: "serviceUnavailable"}

    def __init__(self, exception):
        """Create a Fault for the given webob.exc.exception."""
        self.wrapped_exc = exception
        self.status_int = exception.status_int

    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, req):
        """Generate a WSGI response based on the exception passed to ctor."""
        # Replace the body with fault details.
        locale = req.best_match_language()
        code = self.wrapped_exc.status_int
        fault_name = self._fault_names.get(code, "computeFault")
        explanation = self.wrapped_exc.explanation
        fault_data = {
            fault_name: {
                'code': code,
                'message': i18n.translate(explanation, locale)}}
        if code == 413:
            # Propagate the rate-limit retry hint when present.
            retry = self.wrapped_exc.headers.get('Retry-After', None)
            if retry:
                fault_data[fault_name]['retryAfter'] = retry

        # NOTE: dropped the unused 'metadata' attribute map, a leftover
        # from XML serialization support; only JSON is handled here.
        content_type = req.best_match_content_type()
        serializer = {
            'application/json': JSONDictSerializer(),
        }[content_type]

        body = serializer.serialize(fault_data)
        if isinstance(body, six.text_type):
            body = body.encode('utf-8')
        self.wrapped_exc.body = body
        self.wrapped_exc.content_type = content_type
        _set_request_id_header(req, self.wrapped_exc.headers)

        return self.wrapped_exc

    def __str__(self):
        return self.wrapped_exc.__str__()
+
+
def _set_request_id_header(req, headers):
    """Copy the context's request id into *headers*, if a context exists."""
    context = req.environ.get('coriolis.context')
    if not context:
        return
    headers['x-compute-request-id'] = context.request_id
+
+
def _check_string_length(value, name, min_length=0, max_length=None):
    """Validate that *value* is a string with length within bounds.

    :param value: the value of the string
    :param name: the name of the string
    :param min_length: the min_length of the string
    :param max_length: the max_length of the string
    :raises exception.InvalidInput: on a type or length violation
    """
    if not isinstance(value, six.string_types):
        raise exception.InvalidInput(
            message=_("%s is not a string or unicode") % name)

    length = len(value)
    if length < min_length:
        raise exception.InvalidInput(
            message=_("%(name)s has a minimum character requirement of "
                      "%(min_length)s.") % {'name': name,
                                            'min_length': min_length})

    if max_length and length > max_length:
        raise exception.InvalidInput(
            message=_("%(name)s has more than %(max_length)s "
                      "characters.") % {'name': name,
                                        'max_length': max_length})
+
+
class OverLimitFault(webob.exc.HTTPException):
    """Rate-limited request response."""

    def __init__(self, message, details, retry_time):
        """Initialize new `OverLimitFault` with relevant information."""
        hdrs = OverLimitFault._retry_after(retry_time)
        self.wrapped_exc = webob.exc.HTTPRequestEntityTooLarge(headers=hdrs)
        self.content = {
            "overLimitFault": {
                "code": self.wrapped_exc.status_int,
                "message": message,
                "details": details,
            },
        }

    @staticmethod
    def _retry_after(retry_time):
        """Build a Retry-After header from an absolute retry timestamp."""
        delay = int(math.ceil(retry_time - time.time()))
        retry_after = delay if delay > 0 else 0
        headers = {'Retry-After': '%d' % retry_after}
        return headers

    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, request):
        """Serializes the wrapped exception conforming to our error format."""
        content_type = request.best_match_content_type()

        def translate(msg):
            locale = request.best_match_language()
            return i18n.translate(msg, locale)

        self.content['overLimitFault']['message'] = \
            translate(self.content['overLimitFault']['message'])
        self.content['overLimitFault']['details'] = \
            translate(self.content['overLimitFault']['details'])

        serializer = {
            'application/json': JSONDictSerializer(),
        }[content_type]

        content = serializer.serialize(self.content)
        # Encode text to bytes before assignment, matching the handling
        # in Fault.__call__; webob response bodies must be bytes.
        if isinstance(content, six.text_type):
            content = content.encode('utf-8')
        self.wrapped_exc.body = content

        return self.wrapped_exc

+ 28 - 0
coriolis/cmd/api.py

@@ -0,0 +1,28 @@
+import eventlet
+eventlet.monkey_patch()
+
+import sys
+
+from coriolis import service
+
+from oslo_config import cfg
+from oslo_log import log as logging
+
+CONF = cfg.CONF
+
+
def main():
    """Entry point for the coriolis API (WSGI) service."""
    # Logging options are registered before the config is parsed.
    logging.register_options(CONF)
    logging.setup(CONF, 'coriolis')

    CONF(sys.argv[1:], project='coriolis',
         version="1.0.0")

    process_launcher = service.get_process_launcher()
    api_server = service.WSGIService('osapi_migration')
    process_launcher.launch_service(
        api_server, workers=api_server.get_workers_count())
    process_launcher.wait()


if __name__ == "__main__":
    main()

+ 31 - 0
coriolis/cmd/conductor.py

@@ -0,0 +1,31 @@
+import eventlet
+eventlet.monkey_patch()
+
+import sys
+
+from coriolis.conductor.rpc import server as rpc_server
+from coriolis import service
+
+from oslo_config import cfg
+from oslo_log import log as logging
+
+CONF = cfg.CONF
+
+
def main():
    """Entry point for the coriolis conductor RPC service."""
    # Logging options are registered before the config is parsed.
    logging.register_options(CONF)
    logging.setup(CONF, 'coriolis')

    CONF(sys.argv[1:], project='coriolis',
         version="1.0.0")

    process_launcher = service.get_process_launcher()
    messaging_server = service.MessagingService(
        'coriolis_conductor', [rpc_server.ConductorServerEndpoint()],
        rpc_server.VERSION)
    process_launcher.launch_service(
        messaging_server, workers=messaging_server.get_workers_count())
    process_launcher.wait()


if __name__ == "__main__":
    main()

+ 31 - 0
coriolis/cmd/worker.py

@@ -0,0 +1,31 @@
+import eventlet
+eventlet.monkey_patch()
+
+import sys
+
+from coriolis.worker.rpc import server as rpc_server
+from coriolis import service
+
+from oslo_config import cfg
+from oslo_log import log as logging
+
+CONF = cfg.CONF
+
+
def main():
    """Entry point for the coriolis worker RPC service."""
    # Logging options are registered before the config is parsed.
    logging.register_options(CONF)
    logging.setup(CONF, 'coriolis')

    CONF(sys.argv[1:], project='coriolis',
         version="1.0.0")

    process_launcher = service.get_process_launcher()
    messaging_server = service.MessagingService(
        'coriolis_worker', [rpc_server.WorkerServerEndpoint()],
        rpc_server.VERSION)
    process_launcher.launch_service(
        messaging_server, workers=messaging_server.get_workers_count())
    process_launcher.wait()


if __name__ == "__main__":
    main()

+ 0 - 0
coriolis/conductor/__init__.py


+ 0 - 0
coriolis/conductor/rpc/__init__.py


+ 39 - 0
coriolis/conductor/rpc/client.py

@@ -0,0 +1,39 @@
+from oslo_config import cfg
+import oslo_messaging as messaging
+
+CONF = cfg.CONF
+CONF.import_opt("messaging_transport_url", "coriolis.service")
+
+VERSION = "1.0"
+
+
class ConductorClient(object):
    """RPC client proxy for the coriolis conductor service."""

    def __init__(self):
        rpc_target = messaging.Target(
            topic='coriolis_conductor', version=VERSION)
        rpc_transport = messaging.get_transport(
            cfg.CONF, CONF.messaging_transport_url)
        self._client = messaging.RPCClient(rpc_transport, rpc_target)

    def get_migrations(self, ctxt):
        """Return the migrations visible to the caller."""
        return self._client.call(ctxt, 'get_migrations')

    def get_migration(self, ctxt, migration_id):
        """Return a single migration by id."""
        return self._client.call(
            ctxt, 'get_migration', migration_id=migration_id)

    def begin_migrate_instances(self, ctxt, origin, destination, instances):
        """Ask the conductor to start migrating the given instances."""
        self._client.call(
            ctxt, 'migrate_instances', origin=origin,
            destination=destination, instances=instances)

    def set_operation_host(self, ctxt, operation_id, host):
        """Record which worker host is handling an operation."""
        self._client.call(
            ctxt, 'set_operation_host', operation_id=operation_id, host=host)

    def export_completed(self, ctxt, operation_id, export_info):
        """Notify the conductor that an export operation finished."""
        self._client.call(
            ctxt, 'export_completed', operation_id=operation_id,
            export_info=export_info)

    def import_completed(self, ctxt, operation_id):
        """Notify the conductor that an import operation finished."""
        self._client.call(ctxt, 'import_completed', operation_id=operation_id)

+ 94 - 0
coriolis/conductor/rpc/server.py

@@ -0,0 +1,94 @@
+import uuid
+
+import json
+
+import oslo_messaging as messaging
+
+from coriolis import constants
+from coriolis.db import api as db_api
+from coriolis.db.sqlalchemy import models
+from coriolis.worker.rpc import client as rpc_worker_client
+
+VERSION = "1.0"
+
+
class ConductorServerEndpoint(object):
    """RPC server endpoint implementing the conductor operations."""

    def __init__(self):
        self._rpc_worker_client = rpc_worker_client.WorkerClient()

    def get_migrations(self, ctxt):
        """Return all migrations for the context's user."""
        # TODO: fix context
        from coriolis import context
        ctxt = context.CoriolisContext()

        return db_api.get_migrations(ctxt)

    def get_migration(self, ctxt, migration_id):
        """Return a single migration by id."""
        # TODO: fix context
        from coriolis import context
        ctxt = context.CoriolisContext()

        return db_api.get_migration(ctxt, migration_id)

    def migrate_instances(self, ctxt, origin, destination, instances):
        """Create a migration with one export operation per instance and
        start the exports on the worker service."""
        # TODO: fix context
        from coriolis import context
        ctxt = context.CoriolisContext()

        migration = models.Migration()
        migration.user_id = "todo"
        migration.status = constants.MIGRATION_STATUS_STARTED
        migration.origin = json.dumps(origin)
        migration.destination = json.dumps(destination)

        for instance in instances:
            op = models.Operation()
            op.id = str(uuid.uuid4())
            # presumably attaches op to migration.operations via the ORM
            # relationship backref -- confirm against the model definition
            op.migration = migration
            op.instance = instance
            op.status = constants.OPERATION_STATUS_STARTED
            op.operation_type = constants.OPERATION_TYPE_EXPORT

        db_api.add(ctxt, migration)

        for op in migration.operations:
            # BUGFIX: export each operation's own instance; previously the
            # leaked 'instance' loop variable from the creation loop above
            # was passed, so every export targeted the last instance only.
            self._rpc_worker_client.begin_export_instance(
                ctxt.to_dict(), op.id, origin, op.instance)

    def set_operation_host(self, ctxt, operation_id, host):
        """Record the worker host handling *operation_id*."""
        # TODO: fix context
        from coriolis import context
        ctxt = context.CoriolisContext()
        db_api.set_operation_host(ctxt, operation_id, host)

    def export_completed(self, ctxt, operation_id, export_info):
        """Mark the export done and schedule the matching import on the
        same worker host that performed the export."""
        # TODO: fix context
        from coriolis import context
        ctxt = context.CoriolisContext()

        db_api.update_operation_status(
            ctxt, operation_id, constants.OPERATION_STATUS_COMPLETE)
        op_export = db_api.get_operation(ctxt, operation_id)

        op_import = models.Operation()
        op_import.id = str(uuid.uuid4())
        op_import.migration = op_export.migration
        op_import.instance = op_export.instance
        op_import.status = constants.OPERATION_STATUS_STARTED
        op_import.operation_type = constants.OPERATION_TYPE_IMPORT

        db_api.add(ctxt, op_import)

        self._rpc_worker_client.begin_import_instance(
            ctxt.to_dict(), op_export.host, op_import.id,
            json.loads(op_import.migration.destination),
            op_import.instance,
            export_info)

    def import_completed(self, ctxt, operation_id):
        """Mark an import operation as complete."""
        # TODO: fix context
        from coriolis import context
        ctxt = context.CoriolisContext()

        db_api.update_operation_status(
            ctxt, operation_id, constants.OPERATION_STATUS_COMPLETE)

+ 13 - 0
coriolis/constants.py

@@ -0,0 +1,13 @@
+# Lifecycle states of an overall migration.
+MIGRATION_STATUS_STARTED = "STARTED"
+MIGRATION_STATUS_COMPLETE = "COMPLETE"
+MIGRATION_STATUS_ERROR = "ERROR"
+
+# Lifecycle states of a single export/import operation.
+OPERATION_STATUS_STARTED = "STARTED"
+OPERATION_STATUS_COMPLETE = "COMPLETE"
+OPERATION_STATUS_ERROR = "ERROR"
+
+# The two kinds of operations a migration is composed of.
+OPERATION_TYPE_EXPORT = "EXPORT"
+OPERATION_TYPE_IMPORT = "IMPORT"
+
+# Provider role selectors used by coriolis.providers.factory.get_provider.
+PROVIDER_TYPE_IMPORT = 1
+PROVIDER_TYPE_EXPORT = 2

+ 16 - 0
coriolis/context.py

@@ -0,0 +1,16 @@
+from oslo_context import context
+from oslo_db.sqlalchemy import enginefacade
+
+
+@enginefacade.transaction_context_provider
+class CoriolisContext(context.RequestContext):
+    def __init__(self):
+        self.user_id = "todo"
+
+    def to_dict(self):
+        # values = super(CoriolisContext, self).to_dict()
+        values = {}
+        values.update({
+            'user_id': self.user_id,
+        })
+        return values

+ 0 - 0
coriolis/db/__init__.py


+ 75 - 0
coriolis/db/api.py

@@ -0,0 +1,75 @@
+from oslo_config import cfg
+from oslo_db import api as db_api
+from oslo_db import options as db_options
+from oslo_db.sqlalchemy import enginefacade
+from sqlalchemy import orm
+
+from coriolis.db.sqlalchemy import models
+
+CONF = cfg.CONF
+db_options.set_defaults(CONF)
+
+
+# Dispatch table for oslo.db's pluggable backend loader; only the
+# sqlalchemy backend is provided.
+_BACKEND_MAPPING = {'sqlalchemy': 'coriolis.db.sqlalchemy.api'}
+IMPL = db_api.DBAPI.from_config(CONF, backend_mapping=_BACKEND_MAPPING)
+
+
+def get_engine():
+    """Return the backend's SQLAlchemy engine."""
+    return IMPL.get_engine()
+
+
+def get_session():
+    """Return a new SQLAlchemy session from the backend."""
+    return IMPL.get_session()
+
+
+def db_sync(engine, version=None):
+    """Migrate the database to `version` or the most recent version."""
+    return IMPL.db_sync(engine, version=version)
+
+
+def db_version(engine):
+    """Display the current database version."""
+    return IMPL.db_version(engine)
+
+
+@enginefacade.reader
+def get_migrations(context):
+    # Eager-load operations to avoid per-row lazy queries.
+    return context.session.query(models.Migration).options(
+        orm.joinedload("operations")).filter_by(
+        user_id=context.user_id).all()
+
+
+@enginefacade.reader
+def get_migration(context, migration_id):
+    # Scoped to the calling user; returns None when not found.
+    return context.session.query(models.Migration).options(
+        orm.joinedload("operations")).filter_by(
+        user_id=context.user_id, id=migration_id).first()
+
+
+@enginefacade.writer
+def add(context, migration):
+    # Accepts any mapped object (used for Migration and Operation alike).
+    context.session.add(migration)
+
+
+@enginefacade.writer
+def update_operation_status(context, operation_id, status):
+    # NOTE(review): first() returns None for an unknown operation_id, so
+    # this raises AttributeError rather than a NotFound -- TODO confirm.
+    op = context.session.query(models.Operation).filter_by(
+        id=operation_id).first()
+    op.status = status
+
+
+@enginefacade.writer
+def set_operation_host(context, operation_id, host):
+    op = context.session.query(models.Operation).filter_by(
+        id=operation_id).first()
+    op.host = host
+
+
+@enginefacade.reader
+def get_operation(context, operation_id):
+    # Eager-load the parent migration for callers needing its destination.
+    return context.session.query(models.Operation).options(
+        orm.joinedload("migration")).filter_by(
+        id=operation_id).first()
+
+# TODO: move from here
+# NOTE(review): running schema migrations as an import-time side effect
+# is fragile; this should happen during service startup instead.
+db_sync(get_engine())

+ 0 - 0
coriolis/db/sqlalchemy/__init__.py


+ 49 - 0
coriolis/db/sqlalchemy/api.py

@@ -0,0 +1,49 @@
+import sys
+
+from oslo_config import cfg
+from oslo_db import options as db_options
+from oslo_db.sqlalchemy import session as db_session
+
+from coriolis.db.sqlalchemy import migration
+
+CONF = cfg.CONF
+db_options.set_defaults(CONF)
+
+# Lazily-created EngineFacade singleton (see get_facade).
+_facade = None
+
+
+def get_facade():
+    global _facade
+    if not _facade:
+        # TODO: investigate why the CONF.database.connection is None!
+        # _facade = db_session.EngineFacade(CONF.database.connection)
+        # _facade = db_session.EngineFacade.from_config(CONF)
+        _facade = db_session.EngineFacade(
+            "mysql://coriolis:Passw0rd@localhost/coriolis")
+    return _facade
+
+
+def get_engine():
+    """Return the SQLAlchemy engine from the shared facade."""
+    return get_facade().get_engine()
+
+
+def get_session():
+    """Return a new SQLAlchemy session from the shared facade."""
+    return get_facade().get_session()
+
+
+def get_backend():
+    """The backend is this module itself."""
+    return sys.modules[__name__]
+
+
+def db_sync(engine, version=None):
+    """Migrate the database to `version` or the most recent version."""
+    if version is not None and int(version) < db_version(engine):
+        raise Exception(_("Cannot migrate to lower schema version."))
+
+    return migration.db_sync(engine, version=version)
+
+
+def db_version(engine):
+    """Display the current database version."""
+    return migration.db_version(engine)

+ 0 - 0
coriolis/db/sqlalchemy/migrate_repo/__init__.py


+ 5 - 0
coriolis/db/sqlalchemy/migrate_repo/manage.py

@@ -0,0 +1,5 @@
+#!/usr/bin/env python
+# sqlalchemy-migrate repository management entry point.
+from migrate.versioning.shell import main
+
+if __name__ == '__main__':
+    # NOTE(review): debug is passed as the string 'False' -- this matches
+    # sqlalchemy-migrate's generated boilerplate; TODO confirm intent.
+    main(debug='False')

+ 25 - 0
coriolis/db/sqlalchemy/migrate_repo/migrate.cfg

@@ -0,0 +1,25 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=coriolis
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to 
+# change the table name in each database too. 
+version_table=migrate_version
+
+# When committing a change script, Migrate will attempt to generate the 
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the 
+# commit continues, perhaps ending successfully. 
+# Databases in this list MUST compile successfully during a commit, or the 
+# entire commit will fail. List the databases your application will actually 
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
+
+# When creating new change scripts, Migrate will stamp the new script with
+# a version number. By default this is latest_version + 1. You can set this
+# to 'true' to tell Migrate to use the UTC timestamp instead.
+use_timestamp_numbering=False

+ 55 - 0
coriolis/db/sqlalchemy/migrate_repo/versions/001_initial.py

@@ -0,0 +1,55 @@
+import uuid
+
+import sqlalchemy
+
+
+def upgrade(migrate_engine):
+    meta = sqlalchemy.MetaData()
+    meta.bind = migrate_engine
+
+    migration = sqlalchemy.Table(
+        'migration', meta,
+        sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True,
+                          nullable=False),
+        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
+        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
+        sqlalchemy.Column("user_id", sqlalchemy.String(255), nullable=False),
+        sqlalchemy.Column("origin", sqlalchemy.String(1024), nullable=False),
+        sqlalchemy.Column("destination", sqlalchemy.String(1024),
+                          nullable=False),
+        sqlalchemy.Column("status", sqlalchemy.String(100), nullable=False),
+        mysql_engine='InnoDB',
+        mysql_charset='utf8'
+    )
+
+    operation = sqlalchemy.Table(
+        'operation', meta,
+        sqlalchemy.Column('id', sqlalchemy.String(36), primary_key=True,
+                          default=lambda: str(uuid.uuid4())),
+        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
+        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
+        sqlalchemy.Column("migration_id", sqlalchemy.Integer,
+                          sqlalchemy.ForeignKey('migration.id'),
+                          nullable=False),
+        sqlalchemy.Column("instance", sqlalchemy.String(1024), nullable=False),
+        sqlalchemy.Column("host", sqlalchemy.String(1024), nullable=True),
+        sqlalchemy.Column("status", sqlalchemy.String(100), nullable=False),
+        sqlalchemy.Column("operation_type", sqlalchemy.String(100)
+                          nullable=False),
+        mysql_engine='InnoDB',
+        mysql_charset='utf8'
+    )
+
+    tables = (
+        migration,
+        operation,
+    )
+
+    for index, table in enumerate(tables):
+        try:
+            table.create()
+        except Exception:
+            # If an error occurs, drop all tables created so far to return
+            # to the previously existing state.
+            meta.drop_all(tables=tables[:index])
+            raise

+ 0 - 0
coriolis/db/sqlalchemy/migrate_repo/versions/__init__.py


+ 24 - 0
coriolis/db/sqlalchemy/migration.py

@@ -0,0 +1,24 @@
+import os
+
+from oslo_db.sqlalchemy import migration as oslo_migration
+
+# Schema version before the first migration script (001_initial.py).
+INIT_VERSION = 0
+
+
+def db_sync(engine, version=None):
+    """Upgrade the schema to `version` (or latest) via oslo.db migration."""
+    path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
+                        'migrate_repo')
+    return oslo_migration.db_sync(engine, path, version,
+                                  init_version=INIT_VERSION)
+
+
+def db_version(engine):
+    """Return the current schema version of the database."""
+    path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
+                        'migrate_repo')
+    return oslo_migration.db_version(engine, path, INIT_VERSION)
+
+
+def db_version_control(engine, version=None):
+    """Place the database under sqlalchemy-migrate version control."""
+    path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
+                        'migrate_repo')
+    return oslo_migration.db_version_control(engine, path, version)

+ 36 - 0
coriolis/db/sqlalchemy/models.py

@@ -0,0 +1,36 @@
+import uuid
+
+from oslo_db.sqlalchemy import models
+from sqlalchemy.ext import declarative
+from sqlalchemy.orm import relationship, backref
+from sqlalchemy import (Column, Index, Integer, BigInteger, Enum, String,
+                        schema, Unicode)
+from sqlalchemy import ForeignKey, DateTime, Boolean, Text, Float
+
+BASE = declarative.declarative_base()
+
+
+class Operation(BASE, models.TimestampMixin, models.ModelBase):
+    """A single export or import step belonging to a Migration."""
+
+    __tablename__ = 'operation'
+
+    # NOTE(review): `uuid` is not imported in this module, so this default
+    # raises NameError on insert; an `import uuid` is required at the top.
+    id = Column(String(36), default=lambda: str(uuid.uuid4()),
+                primary_key=True)
+    migration_id = Column(Integer,
+                          ForeignKey('migration.id'),
+                          nullable=False)
+    # migration = relationship("Migration",
+    # backref=backref("operations"), lazy='joined')
+    instance = Column(String(1024), nullable=False)
+    host = Column(String(1024), nullable=True)
+    status = Column(String(100), nullable=False)
+    operation_type = Column(String(100), nullable=False)
+
+
+class Migration(BASE, models.TimestampMixin, models.ModelBase):
+    """A migration of one or more instances between two clouds."""
+
+    __tablename__ = 'migration'
+
+    id = Column(Integer, primary_key=True)
+    user_id = Column(String(255), nullable=False)
+    # origin/destination hold JSON-encoded connection data (see the
+    # conductor, which json.dumps/json.loads these fields).
+    origin = Column(String(1024), nullable=False)
+    destination = Column(String(1024), nullable=False)
+    status = Column(String(100), nullable=False)
+    # Deleting a migration deletes its operations; the backref provides
+    # Operation.migration.
+    operations = relationship(Operation, cascade="all,delete",
+                              backref=backref('migration'))

+ 231 - 0
coriolis/exception.py

@@ -0,0 +1,231 @@
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import sys
+
+from oslo_config import cfg
+from oslo_log import log as logging
+from oslo_versionedobjects import exception as obj_exc
+import six
+import webob.exc
+from webob.util import status_generic_reasons
+from webob.util import status_reasons
+
+from coriolis.i18n import _, _LE
+
+
+LOG = logging.getLogger(__name__)
+
+CONF = cfg.CONF
+
+
+class ConvertedException(webob.exc.WSGIHTTPException):
+    """WSGI HTTP exception with a guaranteed non-empty title."""
+
+    def __init__(self, code=500, title="", explanation=""):
+        self.code = code
+        # There is a strict rule about constructing status line for HTTP:
+        # '...Status-Line, consisting of the protocol version followed by a
+        # numeric status code and its associated textual phrase, with each
+        # element separated by SP characters'
+        # (http://www.faqs.org/rfcs/rfc2616.html)
+        # 'code' and 'title' can not be empty because they correspond
+        # to numeric status code and its associated text
+        if title:
+            self.title = title
+        else:
+            try:
+                self.title = status_reasons[self.code]
+            except KeyError:
+                # Fall back to the generic reason for the status class
+                # (e.g. 5xx) when the exact code has no standard phrase.
+                generic_code = self.code // 100
+                self.title = status_generic_reasons[generic_code]
+        self.explanation = explanation
+        super(ConvertedException, self).__init__()
+
+
+class Error(Exception):
+    """Generic, non-formatted error base."""
+    pass
+
+
+class CoriolisException(Exception):
+    """Base Coriolis Exception
+
+    To correctly use this class, inherit from it and define
+    a 'message' property. That message will get printf'd
+    with the keyword arguments provided to the constructor.
+
+    """
+    message = _("An unknown exception occurred.")
+    code = 500
+    headers = {}
+    safe = False
+
+    def __init__(self, message=None, **kwargs):
+        self.kwargs = kwargs
+        self.kwargs['message'] = message
+
+        if 'code' not in self.kwargs:
+            try:
+                self.kwargs['code'] = self.code
+            except AttributeError:
+                pass
+
+        # Stringify nested exceptions so they can be %-formatted safely.
+        for k, v in self.kwargs.items():
+            if isinstance(v, Exception):
+                self.kwargs[k] = six.text_type(v)
+
+        if self._should_format():
+            try:
+                message = self.message % kwargs
+
+            except Exception:
+                exc_info = sys.exc_info()
+                # kwargs doesn't match a variable in the message
+                # log the issue and the kwargs
+                LOG.exception(_LE('Exception in string format operation'))
+                for name, value in kwargs.items():
+                    LOG.error(_LE("%(name)s: %(value)s"),
+                              {'name': name, 'value': value})
+                # NOTE(review): fatal_exception_format_errors is not
+                # registered anywhere in this module -- reading it may
+                # raise NoSuchOptError; TODO confirm it is registered
+                # elsewhere in the project.
+                if CONF.fatal_exception_format_errors:
+                    six.reraise(*exc_info)
+                # at least get the core message out if something happened
+                message = self.message
+        elif isinstance(message, Exception):
+            message = six.text_type(message)
+
+        # NOTE(luisg): We put the actual message in 'msg' so that we can access
+        # it, because if we try to access the message via 'message' it will be
+        # overshadowed by the class' message attribute
+        self.msg = message
+        super(CoriolisException, self).__init__(message)
+
+    def _should_format(self):
+        # Format when no explicit message was given, or when the class
+        # message template interpolates the supplied message.
+        return self.kwargs['message'] is None or '%(message)' in self.message
+
+    def __unicode__(self):
+        return six.text_type(self.msg)
+
+
+# Concrete exception types. Each maps to an HTTP status code via `code`
+# and a printf-style message template (see CoriolisException).
+class NotAuthorized(CoriolisException):
+    message = _("Not authorized.")
+    code = 403
+
+
+class AdminRequired(NotAuthorized):
+    message = _("User does not have admin privileges")
+
+
+class PolicyNotAuthorized(NotAuthorized):
+    message = _("Policy doesn't allow %(action)s to be performed.")
+
+
+class Invalid(CoriolisException):
+    message = _("Unacceptable parameters.")
+    code = 400
+
+
+class InvalidResults(Invalid):
+    message = _("The results are invalid.")
+
+
+class InvalidInput(Invalid):
+    message = _("Invalid input received: %(reason)s")
+
+
+class InvalidContentType(Invalid):
+    message = _("Invalid content type %(content_type)s.")
+
+
+class InvalidHost(Invalid):
+    message = _("Invalid host: %(reason)s")
+
+
+# Cannot be templated as the error syntax varies.
+# msg needs to be constructed when raised.
+class InvalidParameterValue(Invalid):
+    message = _("%(err)s")
+
+
+class InvalidAuthKey(Invalid):
+    message = _("Invalid auth key: %(reason)s")
+
+
+class InvalidConfigurationValue(Invalid):
+    message = _('Value "%(value)s" is not valid for '
+                'configuration option "%(option)s"')
+
+
+class ServiceUnavailable(Invalid):
+    message = _("Service is unavailable at this time.")
+
+
+class APIException(CoriolisException):
+    message = _("Error while requesting %(service)s API.")
+
+    def __init__(self, message=None, **kwargs):
+        # Default the service name so the template always formats.
+        if 'service' not in kwargs:
+            kwargs['service'] = 'unknown'
+        super(APIException, self).__init__(message, **kwargs)
+
+
+class APITimeout(APIException):
+    message = _("Timeout while requesting %(service)s API.")
+
+
+class NotFound(CoriolisException):
+    message = _("Resource could not be found.")
+    code = 404
+    safe = True
+
+
+class FileNotFound(NotFound):
+    message = _("File %(file_path)s could not be found.")
+
+
+class Duplicate(CoriolisException):
+    pass
+
+
+class MalformedRequestBody(CoriolisException):
+    message = _("Malformed message body: %(reason)s")
+
+
+class ConfigNotFound(NotFound):
+    message = _("Could not find config at %(path)s")
+
+
+class ParameterNotFound(NotFound):
+    message = _("Could not find parameter %(param)s")
+
+
+class PasteAppNotFound(NotFound):
+    message = _("Could not load paste app '%(name)s' from %(path)s")
+
+
+class NoValidHost(CoriolisException):
+    message = _("No valid host was found. %(reason)s")
+
+
+# Re-exported oslo.versionedobjects exceptions for a single import point.
+UnsupportedObjectError = obj_exc.UnsupportedObjectError
+OrphanedObjectError = obj_exc.OrphanedObjectError
+IncompatibleObjectVersion = obj_exc.IncompatibleObjectVersion
+ReadOnlyFieldError = obj_exc.ReadOnlyFieldError
+ObjectActionError = obj_exc.ObjectActionError
+ObjectFieldInvalid = obj_exc.ObjectFieldInvalid
+
+
+class NotSupportedOperation(Invalid):
+    message = _("Operation not supported: %(operation)s.")
+    code = 405

+ 50 - 0
coriolis/i18n.py

@@ -0,0 +1,50 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""oslo.i18n integration module.
+
+See http://docs.openstack.org/developer/oslo.i18n/usage.html .
+
+"""
+
+import oslo_i18n as i18n
+
+DOMAIN = 'cinder'
+
+_translators = i18n.TranslatorFactory(domain=DOMAIN)
+
+# The primary translation function using the well-known name "_"
+_ = _translators.primary
+
+# Translators for log levels.
+#
+# The abbreviated names are meant to reflect the usual use of a short
+# name like '_'. The "L" is for "log" and the other letter comes from
+# the level.
+_LI = _translators.log_info
+_LW = _translators.log_warning
+_LE = _translators.log_error
+_LC = _translators.log_critical
+
+
+def enable_lazy(enable=True):
+    """Delegate to oslo.i18n lazy translation toggling."""
+    return i18n.enable_lazy(enable)
+
+
+def translate(value, user_locale=None):
+    """Translate a (possibly lazy) message for the given locale."""
+    return i18n.translate(value, user_locale)
+
+
+def get_available_languages():
+    """Return the languages with catalogs available for this domain."""
+    return i18n.get_available_languages(DOMAIN)

+ 20 - 0
coriolis/migrations/api.py

@@ -0,0 +1,20 @@
+from coriolis.conductor.rpc import client as rpc_client
+from coriolis import context
+
+
+class API(object):
+    """Thin facade over the conductor RPC client for the migrations API."""
+
+    def __init__(self):
+        self._rpc_client = rpc_client.ConductorClient()
+
+    def start(self, origin, destination, instances):
+        """Kick off a migration of `instances` from origin to destination."""
+        ctxt = context.CoriolisContext()
+        self._rpc_client.begin_migrate_instances(
+            ctxt.to_dict(), origin, destination, instances)
+
+    def get_migrations(self):
+        """List migrations for the current context's user."""
+        ctxt = context.CoriolisContext()
+        return self._rpc_client.get_migrations(ctxt.to_dict())
+
+    def get_migration(self, migration_id):
+        """Return a single migration, or None if not found."""
+        ctxt = context.CoriolisContext()
+        return self._rpc_client.get_migration(ctxt.to_dict(), migration_id)

+ 0 - 0
coriolis/providers/__init__.py


+ 10 - 0
coriolis/providers/base.py

@@ -0,0 +1,10 @@
+# NOTE(review): 'Baseprovider' should be CamelCase 'BaseProvider'; left
+# as-is since subclasses elsewhere may already reference this name.
+class Baseprovider(object):
+    """Common marker base for all migration providers."""
+    pass
+
+
+class BaseImportProvider(Baseprovider):
+    """Base for providers that import instances into a target cloud."""
+    pass
+
+
+class BaseExportProvider(Baseprovider):
+    """Base for providers that export instances from a source cloud."""
+    pass

+ 24 - 0
coriolis/providers/factory.py

@@ -0,0 +1,24 @@
+from coriolis import constants
+from coriolis import exception
+from coriolis.providers import openstack
+from coriolis.providers import vmware_vsphere
+
+
+# Registry of provider implementations keyed by platform name.
+EXPORT_PROVIDERS = {
+    "vmware_vsphere": vmware_vsphere.ExportProvider
+}
+
+IMPORT_PROVIDERS = {
+    "openstack": openstack.ImportProvider
+}
+
+
+def get_provider(name, provider_type):
+    if provider_type == constants.PROVIDER_TYPE_EXPORT:
+        cls = EXPORT_PROVIDERS.get(name)
+    elif provider_type == constants.PROVIDER_TYPE_IMPORT:
+        cls = IMPORT_PROVIDERS.get(name)
+
+    if not cls:
+        raise exception.NotFound("Provider not found: %s" % name)
+    return cls()

+ 10 - 0
coriolis/providers/openstack/__init__.py

@@ -0,0 +1,10 @@
+from coriolis.providers import base
+
+
+class ImportProvider(base.BaseExportProvider):
+    def validate_connection_info(self, connection_info):
+        return True
+
+    def import_instance(self, connection_info, target_environment,
+                        instance_name, export_info):
+        pass

+ 249 - 0
coriolis/providers/vmware_vsphere/__init__.py

@@ -0,0 +1,249 @@
+import os
+import sys
+import time
+from urllib import request
+
+import eventlet
+from oslo_config import cfg
+from oslo_log import log as logging
+from pyVim import connect
+from pyVmomi import vim
+
+from coriolis.providers import base
+from coriolis import utils
+
+# Configuration options for the VMware vSphere export provider.
+vmware_vsphere_opts = [
+    cfg.StrOpt('vdiskmanager_path',
+               default='vmware-vdiskmanager',
+               help='The vmware-vdiskmanager path.'),
+]
+
+CONF = cfg.CONF
+CONF.register_opts(vmware_vsphere_opts, 'vmware_vsphere')
+
+LOG = logging.getLogger(__name__)
+
+
+class ExportProvider(base.BaseExportProvider):
+    def validate_connection_info(self, connection_info):
+        return True
+
+    def _convert_disk_type(self, disk_path, target_disk_path, target_type=0):
+        utils.exec_process([CONF.vmware_vsphere.vdiskmanager_path, "-r",
+                            disk_path, "-t", str(target_type),
+                            target_disk_path])
+
+    def _wait_for_task(self, task):
+        while task.info.state not in [vim.TaskInfo.State.success,
+                                      vim.TaskInfo.State.error]:
+            time.sleep(.1)
+        if task.info.state == vim.TaskInfo.State.error:
+            raise Exception(task.info.error.msg)
+
+    def export_instance(self, connection_info, instance_name, export_path):
+        host = connection_info["host"]
+        port = connection_info.get("port", 443)
+        username = connection_info["username"]
+        password = connection_info["password"]
+        allow_untrusted = connection_info.get("allow_untrusted", False)
+
+        # pyVmomi locks otherwise
+        sys.modules['socket'] = eventlet.patcher.original('socket')
+        ssl = eventlet.patcher.original('ssl')
+
+        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+        if allow_untrusted:
+            context.verify_mode = ssl.CERT_NONE
+
+        LOG.info("Connecting to: %s:%s" % (host, port))
+
+        si = connect.SmartConnect(
+            host=host,
+            user=username,
+            pwd=password,
+            port=port,
+            sslContext=context)
+
+        LOG.info("Retrieving data for VM: %s" % instance_name)
+
+        # TODO: provide path selection
+        datacenter = si.content.rootFolder.childEntity[0]
+        vms = datacenter.vmFolder.childEntity
+        vm = [vm for vm in vms if vm.name == instance_name][0]
+
+        firmware_type_map = {
+            vim.vm.GuestOsDescriptor.FirmwareType.bios: 'BIOS',
+            vim.vm.GuestOsDescriptor.FirmwareType.efi: 'EFI'}
+
+        vm_info = {
+            'num_cpu': vm.config.hardware.numCPU,
+            'num_cores_per_socket': vm.config.hardware.numCoresPerSocket,
+            'memory_mb':  vm.config.hardware.memoryMB,
+            'firmware_type':  firmware_type_map[vm.config.firmware],
+            'nested_virtualization': vm.config.nestedHVEnabled,
+            'dynamic_memory_enabled':
+                not vm.config.memoryReservationLockedToMax,
+            'name': vm.config.name,
+            'guest_id': vm.config.guestId,
+            'id': vm._moId,
+        }
+
+        LOG.info("vm info: %s" % str(vm_info))
+
+        if vm.runtime.powerState != vim.VirtualMachinePowerState.poweredOff:
+            if (vm.guest.toolsRunningStatus !=
+                    vim.vm.GuestInfo.ToolsRunningStatus.guestToolsNotRunning):
+                vm.ShutdownGuest()
+            else:
+                task = vm.PowerOff()
+                _wait_for_task(task)
+
+        disk_ctrls = []
+        devices = [d for d in vm.config.hardware.device if
+                   isinstance(d, vim.vm.device.VirtualController)]
+        for device in devices:
+            ctrl_type = None
+            if isinstance(device, vim.vm.device.VirtualPCIController):
+                ctrl_type = "PCI"
+            elif isinstance(device, vim.vm.device.VirtualSIOController):
+                ctrl_type = "SIO"
+            elif isinstance(device, vim.vm.device.VirtualIDEController):
+                ctrl_type = "IDE"
+            elif isinstance(device, vim.vm.device.VirtualSATAController):
+                ctrl_type = "SATA"
+            elif isinstance(device, vim.vm.device.VirtualSCSIController):
+                ctrl_type = "SCSI"
+            else:
+                continue
+            disk_ctrls.append({'id': device.key, 'type': ctrl_type,
+                               'bus_number': device.busNumber})
+
+        disks = []
+        devices = [d for d in vm.config.hardware.device if
+                   isinstance(d, vim.vm.device.VirtualDisk)]
+        for device in devices:
+            disks.append({'size': device.capacityInBytes,
+                          'address': device.unitNumber,
+                          'id': device.key,
+                          'controller_id': device.controllerKey})
+
+        cdroms = []
+        devices = [d for d in vm.config.hardware.device if
+                   isinstance(d, vim.vm.device.VirtualCdrom)]
+        for device in devices:
+            cdroms.append({'address': device.unitNumber, 'id': device.key,
+                           'controller_id': device.controllerKey})
+
+        floppy = []
+        devices = [d for d in vm.config.hardware.device if
+                   isinstance(d, vim.vm.device.VirtualFloppy)]
+        for device in devices:
+            floppy.append({'address': device.unitNumber, 'id': device.key,
+                           'controller_id': device.controllerKey})
+
+        nics = []
+        devices = [d for d in vm.config.hardware.device if
+                   isinstance(d, vim.vm.device.VirtualEthernetCard)]
+        for device in devices:
+            nics.append({'mac_address': device.macAddress, 'id': device.key,
+                         'name': device.deviceInfo.label,
+                         'network_id': device.backing.network.name})
+
+        serial_ports = []
+        devices = [d for d in vm.config.hardware.device if
+                   isinstance(d, vim.vm.device.VirtualSerialPort)]
+        for device in devices:
+            serial_ports.append({'id': device.key})
+
+        boot_order = []
+        for boot_device in vm.config.bootOptions.bootOrder:
+            if isinstance(boot_device, vim.vm.BootOptions.BootableDiskDevice):
+                boot_order.append({"type": "disk",
+                                   "id": boot_device.deviceKey})
+            elif isinstance(boot_device,
+                            vim.vm.BootOptions.BootableCdromDevice):
+                boot_order.append({"type": "cdrom", "id": None})
+            elif isinstance.append(boot_device,
+                                   vim.vm.BootOptions.BootableEthernetDevice):
+                boot_order.append({"type": "ethernet",
+                                   "id": boot_device.deviceKey})
+            elif isinstance(boot_device,
+                            vim.vm.BootOptions.BootableFloppyDevice):
+                boot_order.append({"type": "floppy", "id": None})
+
+        disk_paths = []
+        lease = vm.ExportVm()
+        while True:
+            if lease.state == vim.HttpNfcLease.State.ready:
+                try:
+                    tot_downloaded_bytes = 0
+                    for du in [du for du in lease.info.deviceUrl if du.disk]:
+                        # Key format: '/vm-70/VirtualLsiLogicController0:0'
+                        ctrl_str, address = du.key[
+                            du.key.rindex('/') + 1:].split(':')
+
+                        def _get_class_name(obj):
+                            return obj.__class__.__name__.split('.')[-1]
+
+                        for i, ctrl in enumerate(
+                            [d for d in vm.config.hardware.device if
+                             isinstance(
+                                d, vim.vm.device.VirtualController) and
+                                ctrl_str.startswith(_get_class_name(d))]):
+                            if int(ctrl_str[len(_get_class_name(ctrl)):]) == i:
+                                disk_key = [
+                                    d for d in vm.config.hardware.device if
+                                    isinstance(
+                                        d, vim.vm.device.VirtualDisk) and
+                                    d.controllerKey == ctrl.key][0].key
+                                break
+
+                        response = request.urlopen(du.url, context=context)
+                        path = os.path.join(export_path, du.targetId)
+                        disk_paths.append({'path': path, 'id': disk_key})
+
+                        LOG.info("Downloading: %s" % path)
+                        with open(path, 'wb') as f:
+                            while True:
+                                chunk = response.read(1024 * 1024)
+                                if not chunk:
+                                    break
+                                tot_downloaded_bytes += len(chunk)
+                                f.write(chunk)
+                                lease.HttpNfcLeaseProgress(
+                                    int(tot_downloaded_bytes * 100 /
+                                        (lease.info.totalDiskCapacityInKB *
+                                         1024)))
+
+                    lease.HttpNfcLeaseComplete()
+                    break
+                except:
+                    lease.HttpNfcLeaseAbort()
+                    raise
+            elif lease.state == vim.HttpNfcLease.State.error:
+                raise Exception(lease.error.msg)
+            else:
+                time.sleep(.1)
+
+        connect.Disconnect(si)
+
+        for disk_path in disk_paths:
+            path = disk_path["path"]
+            LOG.info("Converting VMDK type: %s" % path)
+            tmp_path = "%s.tmp" % path
+            self._convert_disk_type(path, tmp_path)
+            os.remove(path)
+            os.rename(tmp_path, path)
+            [d for d in disks if
+             d["id"] == disk_path["id"]][0]["path"] = os.path.abspath(path)
+
+        vm_info["devices"] = {
+            "nics": nics,
+            "controllers": disk_ctrls,
+            "disks": disks,
+            "cdroms": cdroms,
+            "floppy": floppy,
+        }
+        vm_info["boot_order"] = boot_order
+
+        return vm_info

+ 132 - 0
coriolis/service.py

@@ -0,0 +1,132 @@
+import os
+
+from oslo_concurrency import processutils
+from oslo_config import cfg
+from oslo_log import log as logging
+import oslo_messaging as messaging
+from oslo_service import service
+from oslo_service import wsgi
+
+from coriolis import utils
+
+
# Configuration options controlling where the Migration API listens, how
# the RPC services reach the message broker, and how many worker
# processes each service spawns.
service_opts = [
    cfg.StrOpt('api_migration_listen',
               default="0.0.0.0",
               help='IP address on which the Migration API listens'),
    cfg.PortOpt('api_migration_listen_port',
                default=7667,
                help='Port on which the Migration API listens'),
    cfg.IntOpt('api_migration_workers',
               help='Number of workers for the Migration API service. '
                    'The default is equal to the number of CPUs available.'),
    cfg.StrOpt('messaging_transport_url',
               default="rabbit://guest:guest@127.0.0.1:5672/",
               help='Messaging transport url'),
    cfg.IntOpt('messaging_workers',
               help='Number of workers for the messaging service. '
                    'The default is equal to the number of CPUs available.'),
]

# Registered in the DEFAULT group; other modules pull these in with
# CONF.import_opt("...", "coriolis.service").
CONF = cfg.CONF
CONF.register_opts(service_opts)
LOG = logging.getLogger(__name__)
+
+
class WSGIService(service.ServiceBase):
    """oslo.service wrapper around the WSGI server hosting the API.

    Listen address, port and worker count come from the service
    configuration options registered in this module.
    """

    def __init__(self, name):
        self._host = CONF.api_migration_listen
        self._port = CONF.api_migration_listen_port
        workers = CONF.api_migration_workers
        if not workers:
            # Default to one worker per available CPU.
            workers = processutils.get_worker_count()
        self._workers = workers

        # Load the paste-deploy application by name and serve it.
        self._loader = wsgi.Loader(CONF)
        self._app = self._loader.load_app(name)
        self._server = wsgi.Server(
            CONF, name, self._app, host=self._host, port=self._port)

    def get_workers_count(self):
        """Return how many worker processes the launcher should spawn."""
        return self._workers

    def start(self):
        """Start accepting requests."""
        self._server.start()

    def stop(self):
        """Stop the underlying WSGI server."""
        self._server.stop()

    def wait(self):
        """Block until the server has shut down."""
        self._server.wait()

    def reset(self):
        """Reset the server (e.g. on SIGHUP)."""
        self._server.reset()
+
+
class MessagingService(service.ServiceBase):
    """oslo.messaging RPC server exposing *endpoints* on *topic*."""

    def __init__(self, topic, endpoints, version):
        server_name = utils.get_hostname()
        target = messaging.Target(
            topic=topic, server=server_name, version=version)
        transport = messaging.get_transport(
            CONF, CONF.messaging_transport_url)
        self._server = messaging.get_rpc_server(
            transport, target, endpoints, executor='eventlet')

        workers = CONF.messaging_workers
        self._workers = workers or processutils.get_worker_count()

    def get_workers_count(self):
        """Return how many worker processes the launcher should spawn."""
        return self._workers

    def start(self):
        """Start consuming RPC messages."""
        self._server.start()

    def stop(self):
        """Stop the RPC server."""
        self._server.stop()

    def wait(self):
        # Intentionally a no-op: no blocking wait is exposed here.
        pass

    def reset(self):
        """Reset the server (e.g. on SIGHUP)."""
        self._server.reset()
+
+
def get_process_launcher():
    """Return the ProcessLauncher used to spawn the service workers.

    NOTE(review): a previously commented-out variant noted that
    oslo.service's ProcessLauncher uses green pipes which fail on
    Windows (no non-blocking I/O pipe support), and fell back to a
    ServiceLauncher-based wrapper on 'nt'. If Windows support is
    needed, reintroduce that fallback; the dead commented-out code
    (which also shadow-duplicated this function's name) was removed.
    """
    return service.ProcessLauncher(CONF)

+ 16 - 0
coriolis/utils.py

@@ -0,0 +1,16 @@
+import socket
+import subprocess
+
+
def exec_process(args):
    """Run *args* as a child process and return its captured stdout.

    Raises Exception (embedding exit code, stdout and stderr in the
    message) when the process exits with a non-zero status.
    """
    proc = subprocess.Popen(
        args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    code = proc.returncode
    if not code:
        return out
    raise Exception(
        "Command \"%s\" failed with exit code: %s\nstdout: %s\nstd_err: %s"
        % (args, code, out, err))
+
+
def get_hostname():
    """Return this machine's host name as reported by the OS."""
    hostname = socket.gethostname()
    return hostname

+ 0 - 0
coriolis/worker/__init__.py


+ 0 - 0
coriolis/worker/rpc/__init__.py


+ 32 - 0
coriolis/worker/rpc/client.py

@@ -0,0 +1,32 @@
+from oslo_config import cfg
+import oslo_messaging as messaging
+
CONF = cfg.CONF
# messaging_transport_url is declared in coriolis.service; import it so
# this module works without coriolis.service being imported first.
CONF.import_opt("messaging_transport_url", "coriolis.service")

# RPC API version spoken by this client.
VERSION = "1.0"
+
+
class WorkerClient(object):
    """RPC client for the coriolis worker service ('coriolis_worker' topic)."""

    def __init__(self):
        target = messaging.Target(topic='coriolis_worker', version=VERSION)
        transport = messaging.get_transport(
            cfg.CONF, CONF.messaging_transport_url)
        self._client = messaging.RPCClient(transport, target)

    def begin_export_instance(self, ctxt, operation_id, origin, instance):
        """Asynchronously start exporting *instance* from *origin*."""
        self._client.cast(
            ctxt, 'export_instance', operation_id=operation_id, origin=origin,
            instance=instance)

    def begin_import_instance(self, ctxt, server, operation_id, destination,
                              instance, export_info):
        """Asynchronously start importing *instance* into *destination*."""
        # Needs to be executed on the same server
        cctxt = self._client.prepare(server=server)
        cctxt.cast(
            ctxt, 'import_instance', operation_id=operation_id,
            destination=destination, instance=instance,
            export_info=export_info)

    def update_migration_status(self, ctxt, operation_id, status):
        """Synchronously update the status of the given migration.

        Bug fixes: the body referenced an undefined name (the parameter
        was spelled ``ctx`` while the call used ``ctxt``, a NameError on
        every invocation), and ``operation_id`` was accepted but never
        forwarded with the RPC call.
        """
        self._client.call(
            ctxt, "update_migration_status", operation_id=operation_id,
            status=status)

+ 76 - 0
coriolis/worker/rpc/server.py

@@ -0,0 +1,76 @@
+import os
+
+from oslo_config import cfg
+from oslo_log import log as logging
+import oslo_messaging as messaging
+
+from coriolis.conductor.rpc import client as rpc_conductor_client
+from coriolis import constants
+from coriolis.providers import factory
+from coriolis import utils
+
# Worker-specific configuration, registered under the [worker] group.
worker_opts = [
    cfg.StrOpt('export_base_path',
               default='/tmp',
               help='The path used for hosting exported disks.'),
]

CONF = cfg.CONF
CONF.register_opts(worker_opts, 'worker')


LOG = logging.getLogger(__name__)

# RPC API version exposed by this server endpoint.
VERSION = "1.0"
+
+
class WorkerServerEndpoint(object):
    """RPC endpoint running export/import operations on this worker host."""

    def __init__(self):
        self._server = utils.get_hostname()
        self._rpc_conductor_client = rpc_conductor_client.ConductorClient()

    def _get_operation_export_path(self, operation_id):
        """Return (creating it if missing) the export dir for an operation."""
        export_path = os.path.join(
            CONF.worker.export_base_path, operation_id)
        if not os.path.exists(export_path):
            os.makedirs(export_path)
        return export_path

    def export_instance(self, ctxt, operation_id, origin, instance):
        """Export *instance* from *origin*, then notify the conductor."""
        # Record which worker host picked up the operation, so the
        # matching import can later be targeted at this same server.
        self._rpc_conductor_client.set_operation_host(
            ctxt, operation_id, self._server)

        try:
            provider = factory.get_provider(
                origin["type"], constants.PROVIDER_TYPE_EXPORT)
            vm_info = provider.export_instance(
                origin["connection_info"], instance,
                self._get_operation_export_path(operation_id))
            LOG.info("Exported VM: %s" % vm_info)

            self._rpc_conductor_client.export_completed(
                ctxt, operation_id, vm_info)
        except Exception as ex:
            # Best-effort for now: log and swallow, the error state is
            # not yet propagated to the conductor.
            LOG.exception(ex)
            # TODO: set error state
            # self._rpc_conductor_client.set_operation_error(ctxt,
            # operation_id, ex)

    def import_instance(self, ctxt, operation_id, destination, instance,
                        export_info):
        """Import *instance* into *destination*, then notify the conductor."""
        self._rpc_conductor_client.set_operation_host(
            ctxt, operation_id, self._server)

        try:
            provider = factory.get_provider(
                destination["type"], constants.PROVIDER_TYPE_IMPORT)
            provider.import_instance(
                destination["connection_info"],
                destination["target_environment"],
                instance, export_info)

            self._rpc_conductor_client.import_completed(ctxt, operation_id)
        except Exception as ex:
            # Best-effort for now: log and swallow, the error state is
            # not yet propagated to the conductor.
            LOG.exception(ex)
            # TODO: set error state
            # self._rpc_conductor_client.set_operation_error(
            # ctxt, operation_id, ex)

+ 5 - 0
etc/coriolis/api-paste.ini

@@ -0,0 +1,5 @@
+[pipeline:osapi_migration]
+pipeline = apiv1
+
+[app:apiv1]
+paste.app_factory = coriolis.api.v1.router:APIRouter.factory

+ 4 - 0
etc/coriolis/coriolis.conf

@@ -0,0 +1,4 @@
+[DEFAULT]
+log_dir=/tmp
+log_file=coriolis.log
+messaging_transport_url=rabbit://coriolis:Passw0rd@127.0.0.1:5672/