Просмотр исходного кода

tests: switch from sqlite to mariadb container

sqlite doesn't allow concurrent writes, we'll switch to a proper
database. This also allows us to remove some workarounds.
Lucian Petrut 2 недели назад
Родитель
Commit
e81fac772e

+ 1 - 1
coriolis/db/sqlalchemy/migrate_repo/versions/023_add_deployer_id.py

@@ -6,7 +6,7 @@ def upgrade(migrate_engine):
     meta.bind = migrate_engine
 
     deployment = sqlalchemy.Table(
-        'deployment', meta, autoload=True ,
+        'deployment', meta, autoload=True,
         mysql_engine="InnoDB",
         mysql_charset="utf8")
     deployer_id = sqlalchemy.Column(

+ 0 - 1
coriolis/tests/integration/base.py

@@ -50,7 +50,6 @@ class CoriolisIntegrationTestBase(test_base.CoriolisBaseTestCase):
         super().setUpClass()
         cls._harness = harness._IntegrationHarness.get()
         cls._workdir = cls._harness.workdir
-        cls._db_path = cls._harness.db_path
         cls._lock_path = cls._harness.lock_path
         cls._api_port = cls._harness.api_port
         cls._client = cls.get_client()

+ 64 - 54
coriolis/tests/integration/harness.py

@@ -23,8 +23,8 @@ import queue
 import shutil
 import socket
 import tempfile
-import threading
 from unittest import mock
+import uuid
 
 from cheroot.workers import threadpool as cheroot_threadpool
 from cheroot import wsgi as cheroot_wsgi
@@ -32,7 +32,6 @@ from oslo_config import cfg
 from oslo_log import log as logging
 from oslo_middleware import request_id as request_id_middleware
 from oslo_service import wsgi as base_wsgi
-from oslo_utils import timeutils
 import webob.dec
 
 from coriolis import api as api_module
@@ -46,7 +45,6 @@ from coriolis import context
 from coriolis.db import api as db_api
 from coriolis.db.sqlalchemy import api as sqlalchemy_api
 from coriolis.db.sqlalchemy import migration as db_migration
-from coriolis.db.sqlalchemy import models
 from coriolis import exception
 from coriolis import policy as policy_module
 from coriolis import rpc as rpc_module
@@ -83,7 +81,8 @@ class DaemonCherootWorker(cheroot_threadpool.WorkerThread):
         #
         # Possible alternatives if this becomes a problem:
         #   * Use threading._register_atexit instead of atexit
-        #     * may become public: https://github.com/python/cpython/issues/86128
+        #     * may become public:
+        #       https://github.com/python/cpython/issues/86128
         #   * Cleanup the harness in tearDownClass
         #     * we tried to avoid spinning up the services for every test class
         #   * Move the api services to a separate process
@@ -194,39 +193,6 @@ class _InProcessWorkerServerEndpoint(worker_rpc_server.WorkerServerEndpoint):
         return result
 
 
-def _sqlite_delete_transfer_action(context, cls, id):
-    """Per-object soft-delete workaround for SQLite.
-
-    SQLite does not support the bulk UPDATE ... FROM ... statement that
-    oslo.db's Query.soft_delete() generates for joined-table-inheritance
-    models (Transfer / Deployment -> BaseTransferAction).
-
-    This replacement iterates over objects individually and is used only when
-    integration tests run against SQLite.
-    """
-    args = {"base_id": id}
-    if db_api.is_user_context(context):
-        args["project_id"] = context.project_id
-
-    session = db_api._session(context)
-    now = timeutils.utcnow()
-    objs = db_api._soft_delete_aware_query(
-        context, cls).filter_by(**args).all()
-    if not objs:
-        raise exception.NotFound("0 entries were soft deleted")
-
-    for obj in objs:
-        obj.deleted_at = now
-        obj.deleted = 1
-        obj.save(session=session)
-
-    for execution in db_api._soft_delete_aware_query(
-            context, models.TasksExecution).filter_by(action_id=id).all():
-        execution.deleted_at = now
-        execution.deleted = 1
-        execution.save(session=session)
-
-
 class _IntegrationHarness:
     """Shared Integration tests infrastructure; created once per process.
 
@@ -246,18 +212,31 @@ class _IntegrationHarness:
 
     def __init__(self):
         self.workdir = tempfile.mkdtemp(prefix="coriolis-integration-")
-        self.db_path = os.path.join(self.workdir, "test.db")
         self.lock_path = os.path.join(self.workdir, "locks")
         os.makedirs(self.lock_path)
 
+        self._mysql_container_name = "coriolis-test-mysql-%s" % str(
+            uuid.uuid4()).split("-")[0]
+        self._mysql_username = "root"
+        self._mysql_password = "coriolis"
+        self._mysql_database = "coriolis"
+
         coriolis_conf.init_common_opts()
         cfg.CONF([], project='coriolis', version='1.0.0',
                  default_config_files=[], default_config_dirs=[])
         cfg.CONF.set_override('messaging_transport_url', 'fake://')
         cfg.CONF.set_override(
             'providers', [_TEST_EXPORT_PROVIDER, _TEST_IMPORT_PROVIDER])
+        db_url = ('mysql+pymysql://%(user)s:%(password)s'
+                  '@localhost:3306/%(database)s') % {
+            "user": self._mysql_username,
+            "password": self._mysql_password,
+            "database": self._mysql_database,
+        }
+        cfg.CONF.set_override(
+            'connection', db_url, group='database')
         cfg.CONF.set_override(
-            'connection', 'sqlite:///%s' % self.db_path, group='database')
+            'retry_interval', 1, group='database')
         cfg.CONF.set_override(
             'lock_path', self.lock_path, group='oslo_concurrency')
         coriolis_utils.setup_logging()
@@ -266,19 +245,6 @@ class _IntegrationHarness:
         # Policy enforcer: reset so it re-reads the new CONF (no policy file).
         policy_module.reset()
 
-        # SQLAlchemy facade and RPC transport are module-level singletons;
-        # reset them so they are re-created from the new CONF values.
-        sqlalchemy_api._facade = None
-        rpc_module._TRANSPORT = None
-
-        engine = db_api.get_engine()
-        db_migration.db_sync(engine)
-
-        # SQLite does not support bulk UPDATE ... FROM ... for
-        # joined-table-inheritance models; replace the production function
-        # with a per-object alternative for the lifetime of this process.
-        db_api._delete_transfer_action = _sqlite_delete_transfer_action
-
         self._wsgi_server = None
         self._wsgi_server_thread = None
         self.api_port = None
@@ -286,12 +252,39 @@ class _IntegrationHarness:
         self._scheduler_svc = None
         self._worker_svc = None
         self._worker_host_svc = None
-        self._start_services()
+
+        # SQLAlchemy facade and RPC transport are module-level singletons;
+        # reset them so they are re-created from the new CONF values.
+        sqlalchemy_api._facade = None
+        rpc_module._TRANSPORT = None
 
         atexit.register(self._teardown)
 
-    def _start_services(self):
+        self._start_db_container()
+
+        engine = db_api.get_engine()
+        db_migration.db_sync(engine)
+
+        self._start_coriolis_services()
+
+    def _start_db_container(self):
+        coriolis_utils.exec_process(
+            [
+                "docker",
+                "run",
+                "-d",
+                "--name",
+                self._mysql_container_name,
+                "-e",
+                f"MYSQL_ROOT_PASSWORD={self._mysql_password}",
+                "-e", f"MYSQL_DATABASE={self._mysql_database}",
+                "-p", "3306:3306",
+                "mariadb:10-jammy",
+            ])
+
+    def _start_coriolis_services(self):
         """Start conductor, scheduler, worker, and API in-process."""
+
         rpc_module.init()
 
         # Conductor: must start first so the worker can register with it.
@@ -376,6 +369,23 @@ class _IntegrationHarness:
 
     def _teardown(self):
         LOG.info("Teardown initiated.")
+
+        try:
+            coriolis_utils.exec_process(
+                [
+                    "docker",
+                    "stop",
+                    self._mysql_container_name
+                ])
+            coriolis_utils.exec_process(
+                [
+                    "docker",
+                    "rm",
+                    self._mysql_container_name
+                ])
+        except Exception:
+            pass
+
         for svc in [self._worker_host_svc, self._worker_svc,
                     self._scheduler_svc, self._conductor_svc]:
             if not svc: