#1487 Make db_session singleton
Closed 4 years ago by cqi. Opened 4 years ago by cqi.
cqi/fm-orchestrator refine-db-session into v3
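
At a glance, this change replaces the per-call `models.make_db_session(conf)` context manager with a module-level `scoped_session` singleton exported from the new `module_build_service.db_session` module, and drops the `db_session` parameter from the scheduler handlers and utility functions that used to receive it. A minimal before/after sketch of the calling convention (the `build_id` variable is illustrative, not taken from this diff):

```python
from module_build_service import conf, models

# Before this change: each caller created, committed and closed its own session.
with models.make_db_session(conf) as db_session:
    build = models.ModuleBuild.get_by_id(db_session, build_id)

# After this change: all callers share one thread-local scoped_session and are
# responsible for committing, and for calling remove() when their thread is done.
from module_build_service.db_session import db_session

build = models.ModuleBuild.get_by_id(db_session, build_id)
db_session.remove()
```

The hunks below show the corresponding clean-up points: the poller, the fedmsg consumer and the local-build stop condition each call `db_session.remove()` when they finish their work.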

@@ -20,7 +20,8 @@ 

  import koji

  import pungi.arch

  

- from module_build_service import conf, log, build_logs, models, Modulemd

+ from module_build_service import conf, log, build_logs, Modulemd

+ from module_build_service.db_session import db_session

  from module_build_service.scm import SCM

  from module_build_service.utils import to_text_type, load_mmd, mmd_to_str

  
@@ -777,8 +778,7 @@ 

  

          log_path = os.path.join(prepdir, "build.log")

          try:

-             with models.make_db_session(conf) as db_session:

-                 source = build_logs.path(db_session, self.module)

+             source = build_logs.path(db_session, self.module)

              log.info("Moving logs from %r to %r" % (source, log_path))

              shutil.copy(source, log_path)

          except IOError as e:

@@ -25,6 +25,7 @@ 

  import module_build_service.utils

  from module_build_service.builder.utils import execute_cmd

  from module_build_service.builder.koji_backports import ClientSession as KojiClientSession

+ from module_build_service.db_session import db_session

  from module_build_service.errors import ProgrammingError

  

  from module_build_service.builder.base import GenericBuilder
@@ -159,7 +160,7 @@ 

          log.debug("Using koji_config: %s" % config.koji_config)

  

          self.koji_session = self.get_session(config)

-         self.arches = [arch.name for arch in self.module.arches]

+         self.arches = sorted(arch.name for arch in self.module.arches)

  

          # Allow KojiModuleBuilder to be initialized if no arches are set but the module is in

          # a failed set. This will allow the clean up of old module builds.
@@ -228,63 +229,61 @@ 

          """

          # filtered_rpms will contain the NVRs of non-reusable component's RPMs

          filtered_rpms = list(set(filtered_rpms_of_dep))

-         with models.make_db_session(conf) as db_session:

-             # Get a module build that can be reused, which will likely be the

-             # build dep that is used since it relies on itself

-             reusable_module = get_reusable_module(db_session, module_build)

-             if not reusable_module:

-                 return filtered_rpms

-             koji_session = KojiModuleBuilder.get_session(conf, login=False)

-             # Get all the RPMs and builds of the reusable module in Koji

-             rpms, builds = koji_session.listTaggedRPMS(reusable_module.koji_tag, latest=True)

-             # Convert the list to a dict where each key is the build_id

-             builds = {build["build_id"]: build for build in builds}

-             # Create a mapping of package (SRPM) to the RPMs in NVR format

-             package_to_rpms = {}

-             for rpm in rpms:

-                 package = builds[rpm["build_id"]]["name"]

-                 package_to_rpms.setdefault(package, []).append(kobo.rpmlib.make_nvr(rpm))

- 

-             components_in_module = [c.package for c in module_build.component_builds]

-             reusable_components = get_reusable_components(

-                 db_session,

-                 module_build,

-                 components_in_module,

-                 previous_module_build=reusable_module,

-             )

-             # Loop through all the reusable components to find if any of their RPMs are

-             # being filtered

-             for reusable_component in reusable_components:

-                 # reusable_component will be None if the component can't be reused

-                 if not reusable_component:

-                     continue

-                 # We must get the component name from the NVR and not from

-                 # reusable_component.package because macros such as those used

-                 # by SCLs can change the name of the underlying build

-                 component_name = kobo.rpmlib.parse_nvr(reusable_component.nvr)["name"]

+         # Get a module build that can be reused, which will likely be the

+         # build dep that is used since it relies on itself

+         reusable_module = get_reusable_module(module_build)

+         if not reusable_module:

+             return filtered_rpms

+         koji_session = KojiModuleBuilder.get_session(conf, login=False)

+         # Get all the RPMs and builds of the reusable module in Koji

+         rpms, builds = koji_session.listTaggedRPMS(reusable_module.koji_tag, latest=True)

+         # Convert the list to a dict where each key is the build_id

+         builds = {build["build_id"]: build for build in builds}

+         # Create a mapping of package (SRPM) to the RPMs in NVR format

+         package_to_rpms = {}

+         for rpm in rpms:

+             package = builds[rpm["build_id"]]["name"]

+             package_to_rpms.setdefault(package, []).append(kobo.rpmlib.make_nvr(rpm))

+ 

+         components_in_module = [c.package for c in module_build.component_builds]

+         reusable_components = get_reusable_components(

+             module_build,

+             components_in_module,

+             previous_module_build=reusable_module,

+         )

+         # Loop through all the reusable components to find if any of their RPMs are

+         # being filtered

+         for reusable_component in reusable_components:

+             # reusable_component will be None if the component can't be reused

+             if not reusable_component:

+                 continue

+             # We must get the component name from the NVR and not from

+             # reusable_component.package because macros such as those used

+             # by SCLs can change the name of the underlying build

+             component_name = kobo.rpmlib.parse_nvr(reusable_component.nvr)["name"]

  

-                 if component_name not in package_to_rpms:

-                     continue

+             if component_name not in package_to_rpms:

+                 continue

  

-                 # Loop through the RPMs associated with the reusable component

-                 for nvr in package_to_rpms[component_name]:

-                     parsed_nvr = kobo.rpmlib.parse_nvr(nvr)

+             # Loop through the RPMs associated with the reusable component

+             for nvr in package_to_rpms[component_name]:

+                 parsed_nvr = kobo.rpmlib.parse_nvr(nvr)

+                 # Don't compare with the epoch

+                 parsed_nvr["epoch"] = None

+                 # Loop through all the filtered RPMs to find a match with the reusable

+                 # component's RPMs.

+                 for nvr2 in list(filtered_rpms):

+                     parsed_nvr2 = kobo.rpmlib.parse_nvr(nvr2)

                      # Don't compare with the epoch

-                     parsed_nvr["epoch"] = None

-                     # Loop through all the filtered RPMs to find a match with the reusable

-                     # component's RPMs.

-                     for nvr2 in list(filtered_rpms):

-                         parsed_nvr2 = kobo.rpmlib.parse_nvr(nvr2)

-                         # Don't compare with the epoch

-                         parsed_nvr2["epoch"] = None

-                         # Only remove the filter if we are going to reuse a component with

-                         # the same exact NVR

-                         if parsed_nvr == parsed_nvr2:

-                             filtered_rpms.remove(nvr2)

-                             # Since filtered_rpms was cast to a set and then back

-                             # to a list above, we know there won't be duplicate RPMS,

-                             # so we can just break here.

-                             break

+                     parsed_nvr2["epoch"] = None

+                     # Only remove the filter if we are going to reuse a component with

+                     # the same exact NVR

+                     if parsed_nvr == parsed_nvr2:

+                         filtered_rpms.remove(nvr2)

+                         # Since filtered_rpms was cast to a set and then back

+                         # to a list above, we know there won't be duplicate RPMS,

+                         # so we can just break here.

+                         break

          return filtered_rpms

  

      @staticmethod
@@ -1280,18 +1279,17 @@ 

          :param Modulemd mmd: Modulemd to get the built RPMs from.

          :return: list of NVRs

          """

-         with models.make_db_session(conf) as db_session:

-             build = models.ModuleBuild.get_build_from_nsvc(

-                 db_session,

-                 mmd.get_module_name(),

-                 mmd.get_stream_name(),

-                 mmd.get_version(),

-                 mmd.get_context()

-             )

-             koji_session = KojiModuleBuilder.get_session(conf, login=False)

-             rpms = koji_session.listTaggedRPMS(build.koji_tag, latest=True)[0]

-             nvrs = set(kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)

-             return list(nvrs)

+         build = models.ModuleBuild.get_build_from_nsvc(

+             db_session,

+             mmd.get_module_name(),

+             mmd.get_stream_name(),

+             mmd.get_version(),

+             mmd.get_context()

+         )

+         koji_session = KojiModuleBuilder.get_session(conf, login=False)

+         rpms = koji_session.listTaggedRPMS(build.koji_tag, latest=True)[0]

+         nvrs = set(kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)

+         return list(nvrs)

  

      def finalize(self, succeeded=True):

          # Only import to koji CG if the module is "build" and not scratch.

@@ -24,6 +24,7 @@ 

      get_koji_config,

  )

  from module_build_service.utils.general import mmd_to_str

+ from module_build_service.db_session import db_session

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  

  from module_build_service import models
@@ -561,22 +562,21 @@ 

          :param Modulemd mmd: Modulemd to get the built RPMs from.

          :return: list of NVRs

          """

-         with models.make_db_session(conf) as db_session:

-             build = models.ModuleBuild.get_build_from_nsvc(

-                 db_session,

-                 mmd.get_module_name(),

-                 mmd.get_stream_name(),

-                 mmd.get_version(),

-                 mmd.get_context()

-             )

-             if build.koji_tag.startswith("repofile://"):

-                 # Modules from local repository have already the RPMs filled in mmd.

-                 return mmd.get_rpm_artifacts()

-             else:

-                 koji_session = KojiModuleBuilder.get_session(conf, login=False)

-                 rpms = koji_session.listTaggedRPMS(build.koji_tag, latest=True)[0]

-                 nvrs = set(kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)

-                 return list(nvrs)

+         build = models.ModuleBuild.get_build_from_nsvc(

+             db_session,

+             mmd.get_module_name(),

+             mmd.get_stream_name(),

+             mmd.get_version(),

+             mmd.get_context()

+         )

+         if build.koji_tag.startswith("repofile://"):

+             # Modules from the local repository already have the RPMs filled in the mmd.

+             return mmd.get_rpm_artifacts()

+         else:

+             koji_session = KojiModuleBuilder.get_session(conf, login=False)

+             rpms = koji_session.listTaggedRPMS(build.koji_tag, latest=True)[0]

+             nvrs = set(kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)

+             return list(nvrs)

  

  

  class BaseBuilder(object):

@@ -0,0 +1,76 @@ 

+ # -*- coding: utf-8 -*-

+ # SPDX-License-Identifier: MIT

+ 

+ import sqlalchemy.event

+ 

+ from sqlalchemy.orm import scoped_session, sessionmaker

+ from sqlalchemy.pool import NullPool

+ 

+ from module_build_service import conf

+ from module_build_service.models import session_before_commit_handlers

+ 

+ __all__ = ("db_session",)

+ 

+ 

+ def _setup_event_listeners(db_session):

+     """

+     Starts listening for events related to database session.

+     """

+     if not sqlalchemy.event.contains(db_session, "before_commit", session_before_commit_handlers):

+         sqlalchemy.event.listen(db_session, "before_commit", session_before_commit_handlers)

+ 

+     # initialize DB event listeners from the monitor module

+     from module_build_service.monitor import db_hook_event_listeners

+ 

+     db_hook_event_listeners(db_session.bind.engine)

+ 

+ 

+ def apply_engine_options(conf):

+     options = {

+         "configuration": {"sqlalchemy.url": conf.sqlalchemy_database_uri},

+     }

+     if conf.sqlalchemy_database_uri.startswith("sqlite://"):

+         options.update({

+             # For local module build, MBS is actually a multi-threaded

+             # application. The command submitting a module build runs in its

+             # own thread, and the backend build workflow, implemented as a

+             # fedmsg consumer on top of fedmsg-hub, runs in separate threads.

+             # So, disable this option in order to allow accessing data which

+             # was written from another thread.

+             "connect_args": {'check_same_thread': False},

+ 

+             # Both local module builds and running tests require a file-based

+             # SQLite database, so we do not use a connection pool for these two

+             # scenarios.

+             "poolclass": NullPool,

+         })

+     else:

+         # TODO - we could use ZopeTransactionExtension() here some day for

+         # improved safety on the backend.

+         pool_options = {}

+ 

+         # Apply pool options SQLALCHEMY_* set in MBS config.

+         # Abbrev sa stands for SQLAlchemy.

+         def apply_mbs_option(mbs_config_key, sa_config_key):

+             value = getattr(conf, mbs_config_key, None)

+             if value is not None:

+                 pool_options[sa_config_key] = value

+ 

+         apply_mbs_option("sqlalchemy_pool_size", "pool_size")

+         apply_mbs_option("sqlalchemy_pool_timeout", "pool_timeout")

+         apply_mbs_option("sqlalchemy_pool_recycle", "pool_recycle")

+         apply_mbs_option("sqlalchemy_max_overflow", "max_overflow")

+ 

+         options.update(pool_options)

+ 

+     return options

+ 

+ 

+ engine_opts = apply_engine_options(conf)

+ engine = sqlalchemy.engine_from_config(**engine_opts)

+ session_factory = sessionmaker(bind=engine)

+ 

+ # This is the global, singleton database Session that could be used to operate

+ # database queries.

+ db_session = scoped_session(session_factory)

+ _setup_event_listeners(db_session)
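
For reference, a minimal sketch of how a worker thread is expected to use this new module; the `poll_once` function and its query are illustrative only, not part of the patch. Because `db_session` is a `scoped_session`, each thread transparently gets its own underlying `Session`, and `db_session.remove()` discards only the calling thread's session.

```python
import threading

from module_build_service import models
from module_build_service.db_session import db_session


def poll_once():
    # The first use of db_session in this thread creates the thread's Session.
    builds = db_session.query(models.ModuleBuild).filter_by(
        state=models.BUILD_STATES["build"]).all()
    for build in builds:
        pass  # inspect or update the build, then db_session.commit() as needed
    # Discard this thread's Session so a lingering transaction cannot block
    # queries issued from other threads.
    db_session.remove()


threading.Thread(target=poll_once).start()
```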

file modified
+47 -45
@@ -19,6 +19,7 @@ 

      import_mmd,

      import_builds_from_local_dnf_repos,

  )

+ from module_build_service.db_session import db_session

  from module_build_service.errors import StreamAmbigous

  import module_build_service.messaging

  import module_build_service.scheduler.consumer
@@ -157,37 +158,37 @@ 

  

      yaml_file_path = os.path.abspath(yaml_file)

  

-     with models.make_db_session(conf) as db_session:

-         if offline:

-             import_builds_from_local_dnf_repos(db_session, platform_id)

-         load_local_builds(db_session, local_build_nsvs)

- 

-         with open(yaml_file_path) as fd:

-             filename = os.path.basename(yaml_file)

-             handle = FileStorage(fd)

-             handle.filename = filename

-             try:

-                 module_builds = submit_module_build_from_yaml(

-                     db_session, username, handle, params,

-                     stream=str(stream), skiptests=skiptests

-                 )

-             except StreamAmbigous as e:

-                 logging.error(str(e))

-                 logging.error("Use '-s module_name:module_stream' to choose the stream")

-                 return

- 

-             module_build_ids = [build.id for build in module_builds]

- 

-         stop = module_build_service.scheduler.make_simple_stop_condition(db_session)

+     from module_build_service.db_session import db_session

+ 

+     if offline:

+         import_builds_from_local_dnf_repos(platform_id)

+     load_local_builds(local_build_nsvs)

+ 

+     with open(yaml_file_path) as fd:

+         filename = os.path.basename(yaml_file)

+         handle = FileStorage(fd)

+         handle.filename = filename

+         try:

+             module_builds = submit_module_build_from_yaml(

+                 db_session, username, handle, params,

+                 stream=str(stream), skiptests=skiptests

+             )

+         except StreamAmbigous as e:

+             logging.error(str(e))

+             logging.error("Use '-s module_name:module_stream' to choose the stream")

+             return

+ 

+         module_build_ids = [build.id for build in module_builds]

+ 

+     stop = module_build_service.scheduler.make_simple_stop_condition()

  

      # Run the consumer until stop_condition returns True

      module_build_service.scheduler.main([], stop)

  

-     with models.make_db_session(conf) as db_session:

-         has_failed_module = db_session.query(models.ModuleBuild).filter(

-             models.ModuleBuild.id.in_(module_build_ids),

-             models.ModuleBuild.state == models.BUILD_STATES["failed"],

-         ).count() > 0

+     has_failed_module = db_session.query(models.ModuleBuild).filter(

+         models.ModuleBuild.id.in_(module_build_ids),

+         models.ModuleBuild.state == models.BUILD_STATES["failed"],

+     ).count() > 0

  

      if has_failed_module:

          raise RuntimeError("Module build failed")
@@ -221,28 +222,29 @@ 

      if len(parts) >= 4:

          filter_by_kwargs["context"] = parts[3]

  

-     with models.make_db_session(conf) as db_session:

-         # Find module builds to retire

-         module_builds = db_session.query(models.ModuleBuild).filter_by(**filter_by_kwargs).all()

+     # Find module builds to retire

+     module_builds = db_session.query(models.ModuleBuild).filter_by(**filter_by_kwargs).all()

  

-         if not module_builds:

-             logging.info("No module builds found.")

-             return

+     if not module_builds:

+         logging.info("No module builds found.")

+         return

  

-         logging.info("Found %d module builds:", len(module_builds))

-         for build in module_builds:

-             logging.info("\t%s", ":".join((build.name, build.stream, build.version, build.context)))

+     logging.info("Found %d module builds:", len(module_builds))

+     for build in module_builds:

+         logging.info("\t%s", ":".join((build.name, build.stream, build.version, build.context)))

  

-         # Prompt for confirmation

-         is_confirmed = confirm or prompt_bool("Retire {} module builds?".format(len(module_builds)))

-         if not is_confirmed:

-             logging.info("Module builds were NOT retired.")

-             return

+     # Prompt for confirmation

+     is_confirmed = confirm or prompt_bool("Retire {} module builds?".format(len(module_builds)))

+     if not is_confirmed:

+         logging.info("Module builds were NOT retired.")

+         return

+ 

+     # Retire module builds

+     for build in module_builds:

+         build.transition(

+             db_session, conf, models.BUILD_STATES["garbage"], "Module build retired")

  

-         # Retire module builds

-         for build in module_builds:

-             build.transition(

-                 db_session, conf, models.BUILD_STATES["garbage"], "Module build retired")

+     db_session.commit()

  

      logging.info("Module builds retired.")

  

file modified
+1 -116
@@ -15,8 +15,7 @@ 

  import kobo.rpmlib

  from sqlalchemy import func, and_

  from sqlalchemy.orm import lazyload

- from sqlalchemy.orm import validates, scoped_session, sessionmaker, load_only

- from sqlalchemy.pool import NullPool

+ from sqlalchemy.orm import validates, load_only

  

  import module_build_service.messaging

  from module_build_service import db, log, get_url_for, conf
@@ -82,120 +81,6 @@ 

      return None

  

  

- @contextlib.contextmanager

- def _dummy_context_mgr():

-     """

-     Yields None. Used in the make_session to not duplicate the code when

-     app_context exists.

-     """

-     yield None

- 

- 

- def _setup_event_listeners(db_session):

-     """

-     Starts listening for events related to database session.

-     """

-     if not sqlalchemy.event.contains(db_session, "before_commit", session_before_commit_handlers):

-         sqlalchemy.event.listen(db_session, "before_commit", session_before_commit_handlers)

- 

-     # initialize DB event listeners from the monitor module

-     from module_build_service.monitor import db_hook_event_listeners

- 

-     db_hook_event_listeners(db_session.bind.engine)

- 

- 

- def apply_engine_options(conf):

-     options = {

-         "configuration": {"sqlalchemy.url": conf.sqlalchemy_database_uri},

-     }

-     if conf.sqlalchemy_database_uri.startswith("sqlite://"):

-         options.update({

-             # For local module build, MBS is actually a multi-threaded

-             # application. The command submitting a module build runs in its

-             # own thread, and the backend build workflow, implemented as a

-             # fedmsg consumer on top of fedmsg-hub, runs in separate threads.

-             # So, disable this option in order to allow accessing data which

-             # was written from another thread.

-             "connect_args": {'check_same_thread': False},

- 

-             # Both local module build and running tests requires a file-based

-             # SQLite database, we do not use a connection pool for these two

-             # scenario.

-             "poolclass": NullPool,

-         })

-     else:

-         # TODO - we could use ZopeTransactionExtension() here some day for

-         # improved safety on the backend.

-         pool_options = {}

- 

-         # Apply pool options SQLALCHEMY_* set in MBS config.

-         # Abbrev sa stands for SQLAlchemy.

-         def apply_mbs_option(mbs_config_key, sa_config_key):

-             value = getattr(conf, mbs_config_key, None)

-             if value is not None:

-                 pool_options[sa_config_key] = value

- 

-         apply_mbs_option("sqlalchemy_pool_size", "pool_size")

-         apply_mbs_option("sqlalchemy_pool_timeout", "pool_timeout")

-         apply_mbs_option("sqlalchemy_pool_recycle", "pool_recycle")

-         apply_mbs_option("sqlalchemy_max_overflow", "max_overflow")

- 

-         options.update(pool_options)

- 

-     return options

- 

- 

- def create_sa_session(conf):

-     """Create a SQLAlchemy session object"""

-     engine_opts = apply_engine_options(conf)

-     engine = sqlalchemy.engine_from_config(**engine_opts)

-     session = scoped_session(sessionmaker(bind=engine))()

-     return session

- 

- 

- @contextlib.contextmanager

- def make_db_session(conf):

-     """Yields new SQLAlchemy database session.

- 

-     MBS is actually a multiple threads application consisting of several

-     components. For a deployment instance, the REST API (implemented by Flask)

-     and build workflow (implemented as a fedmsg-hub consumer), which run in

-     different threads. For building a module locally, MBS runs in a similar

-     scenario, the CLI submits module build and then the build workflow starts

-     in its own thread.

- 

-     The code of REST API uses session object managed by Flask-SQLAlchemy, and

-     other components use a plain SQLAlchemy session object created by this

-     function.

- 

-     To support building a module both remotely and locally, this function

-     handles a session for both SQLite and PostgreSQL. For the scenario working

-     with SQLite, check_same_thread must be set to False so that queries are

-     allowed to access data created inside other threads.

- 

-     **Note that**: MBS uses ``autocommit=False`` mode.

-     """

-     session = create_sa_session(conf)

-     _setup_event_listeners(session)

- 

-     try:

-         # TODO: maybe this could be rewritten in an alternative way to allow

-         #       the caller to pass a flag back to indicate if all transactions

-         #       are handled already by itself, then it is not necessary to call

-         #       following commit unconditionally.

-         yield session

- 

-         # Always commit whatever there is opening transaction.

-         # FIXME: Would it be a performance issue from the database side?

-         session.commit()

-     except Exception:

-         # This is a no-op if no transaction is in progress.

-         session.rollback()

-         raise

-     finally:

-         session.close()

- 

- 

  class MBSBase(db.Model):

      # TODO -- we can implement functionality here common to all our model classes

      __abstract__ = True

@@ -7,22 +7,16 @@ 

  import os

  import tempfile

  

- from flask import Blueprint, Response

  from prometheus_client import (  # noqa: F401

      ProcessCollector,

      CollectorRegistry,

      Counter,

      multiprocess,

      Histogram,

-     generate_latest,

      start_http_server,

-     CONTENT_TYPE_LATEST,

  )

  from sqlalchemy import event

  

- # Service-specific imports

- from module_build_service.utils import cors_header, validate_api_version

- 

  

  if not os.environ.get("prometheus_multiproc_dir"):

      os.environ.setdefault("prometheus_multiproc_dir", tempfile.mkdtemp())
@@ -106,14 +100,3 @@ 

      @event.listens_for(target, "rollback")

      def receive_rollback(conn):

          db_transaction_rollback_counter.inc()

- 

- 

- monitor_api = Blueprint(

-     "monitor", __name__, url_prefix="/module-build-service/<int:api_version>/monitor")

- 

- 

- @cors_header()

- @validate_api_version()

- @monitor_api.route("/metrics")

- def metrics(api_version):

-     return Response(generate_latest(registry), content_type=CONTENT_TYPE_LATEST)

@@ -8,6 +8,8 @@ 

  import module_build_service.models

  import module_build_service.scheduler.consumer

  

+ from module_build_service.db_session import db_session

+ 

  import logging

  

  log = logging.getLogger(__name__)
@@ -46,7 +48,7 @@ 

      )

  

  

- def make_simple_stop_condition(db_session):

+ def make_simple_stop_condition():

      """ Return a simple stop_condition callable.

  

      Intended to be used with the main() function here in manage.py and tests.
@@ -71,6 +73,15 @@ 

          )

          result = module.state in done

          log.debug("stop_condition checking %r, got %r" % (module, result))

+ 

+         # moksha.hub.main starts the hub and runs it in a separate thread. When

+         # the result is True, remove the db_session from that thread local so

+         # that any pending queries in the transaction will not block other

+         # queries made from other thread.

+         # queries made from another thread.

+         # This is particularly useful for testing.

+         if result:

+             db_session.remove()

+ 

          return result

  

      return stop_condition
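
As background for the comment above about removing the thread-local session: a small self-contained SQLAlchemy sketch, independent of MBS, showing that `scoped_session.remove()` only affects the calling thread.

```python
import threading

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine("sqlite://")
db_session = scoped_session(sessionmaker(bind=engine))


def worker():
    # Within one thread the registry always hands back the same Session.
    assert db_session() is db_session()
    # remove() closes and discards only this thread's Session; sessions held
    # by other threads are untouched.
    db_session.remove()


t = threading.Thread(target=worker)
t.start()
t.join()
```
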

@@ -30,6 +30,7 @@ 

  import module_build_service.monitor as monitor

  

  from module_build_service import models, log, conf

+ from module_build_service.db_session import db_session

  from module_build_service.scheduler.handlers import greenwave

  from module_build_service.utils import module_build_state_from_msg

  
@@ -85,7 +86,7 @@ 

  

          # These are our main lookup tables for figuring out what to run in

          # response to what messaging events.

-         self.NO_OP = NO_OP = lambda config, db_session, msg: True

+         self.NO_OP = NO_OP = lambda config, msg: True

          self.on_build_change = {

              koji.BUILD_STATES["BUILDING"]: NO_OP,

              koji.BUILD_STATES[
@@ -145,8 +146,7 @@ 

  

          # Primary work is done here.

          try:

-             with models.make_db_session(conf) as db_session:

-                 self.process_message(db_session, msg)

+             self.process_message(msg)

              monitor.messaging_rx_processed_ok_counter.inc()

          except sqlalchemy.exc.OperationalError as error:

              monitor.messaging_rx_failed_counter.inc()
@@ -158,6 +158,8 @@ 

                  raise

          except Exception:

              monitor.messaging_rx_failed_counter.inc()

+         finally:

+             db_session.remove()

  

          if self.stop_condition and self.stop_condition(message):

              self.shutdown()
@@ -184,7 +186,7 @@ 

  

          all_fns = list(self.on_build_change.items()) + list(self.on_module_change.items())

          for key, callback in all_fns:

-             expected = ["config", "db_session", "msg"]

+             expected = ["config", "msg"]

              if six.PY2:

                  argspec = inspect.getargspec(callback)[0]

              else:
@@ -224,13 +226,12 @@ 

          if isinstance(msg, module_build_service.messaging.GreenwaveDecisionUpdate):

              return (

                  self.on_decision_update,

-                 greenwave.get_corresponding_module_build(

-                     db_session, msg.subject_identifier)

+                 greenwave.get_corresponding_module_build(msg.subject_identifier)

              )

  

          return None, None

  

-     def process_message(self, db_session, msg):

+     def process_message(self, msg):

          # Choose a handler for this message

          handler, build = self._map_message(db_session, msg)

  
@@ -253,7 +254,7 @@ 

          log.info("Calling %s", idx)

  

          try:

-             further_work = handler(conf, db_session, msg) or []

+             further_work = handler(conf, msg) or []

          except Exception as e:

              log.exception()

              db_session.rollback()

@@ -14,12 +14,13 @@ 

  from module_build_service.builder.KojiModuleBuilder import (

      koji_retrying_multicall_map, KojiModuleBuilder,

  )

+ from module_build_service.db_session import db_session

  from module_build_service.errors import UnprocessableEntity

  from module_build_service.resolver.base import GenericResolver

  from module_build_service.utils import retry

  

  

- def add_default_modules(db_session, mmd):

+ def add_default_modules(mmd):

      """

      Add default modules as buildrequires to the input modulemd.

  
@@ -27,7 +28,6 @@ 

      a URL to a text file in xmd.mbs.default_modules_url. Any default module that isn't in the

      database will be logged and ignored.

  

-     :param db_session: a SQLAlchemy database session

      :param Modulemd.ModuleStream mmd: the modulemd of the module to add the module defaults to

      :raises RuntimeError: if the buildrequired base module isn't in the database or the default

          modules list can't be downloaded

@@ -9,11 +9,12 @@ 

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  from module_build_service.utils.general import mmd_to_str

  from module_build_service import models, log, messaging

+ from module_build_service.db_session import db_session

  

  logging.basicConfig(level=logging.DEBUG)

  

  

- def _finalize(config, db_session, msg, state):

+ def _finalize(config, msg, state):

      """ Called whenever a koji build completes or fails. """

  

      # First, find our ModuleBuild associated with this component, if any.
@@ -149,18 +150,18 @@ 

          builder = module_build_service.builder.GenericBuilder.create_from_module(

              db_session, parent, config)

          further_work += module_build_service.utils.continue_batch_build(

-             config, parent, db_session, builder)

+             config, parent, builder)

  

      return further_work

  

  

- def complete(config, db_session, msg):

-     return _finalize(config, db_session, msg, state=koji.BUILD_STATES["COMPLETE"])

+ def complete(config, msg):

+     return _finalize(config, msg, state=koji.BUILD_STATES["COMPLETE"])

  

  

- def failed(config, db_session, msg):

-     return _finalize(config, db_session, msg, state=koji.BUILD_STATES["FAILED"])

+ def failed(config, msg):

+     return _finalize(config, msg, state=koji.BUILD_STATES["FAILED"])

  

  

- def canceled(config, db_session, msg):

-     return _finalize(config, db_session, msg, state=koji.BUILD_STATES["CANCELED"])

+ def canceled(config, msg):

+     return _finalize(config, msg, state=koji.BUILD_STATES["CANCELED"])

@@ -2,13 +2,13 @@ 

  # SPDX-License-Identifier: MIT

  from module_build_service import conf, log

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

+ from module_build_service.db_session import db_session

  from module_build_service.models import ModuleBuild, BUILD_STATES

  

  

- def get_corresponding_module_build(db_session, nvr):

+ def get_corresponding_module_build(nvr):

      """Find corresponding module build from database and return

  

-     :param session: the SQLAlchemy database session object.

      :param str nvr: module build NVR. This is the subject_identifier included

          inside ``greenwave.decision.update`` message.

      :return: the corresponding module build object. For whatever the reason,
@@ -31,13 +31,12 @@ 

      return ModuleBuild.get_by_id(db_session, module_build_id)

  

  

- def decision_update(config, db_session, msg):

+ def decision_update(config, msg):

      """Move module build to ready or failed according to Greenwave result

  

      :param config: the config object returned from function :func:`init_config`,

          which is loaded from configuration file.

      :type config: :class:`Config`

-     :param db_session: the SQLAlchemy database session object.

      :param msg: the message object representing a message received from topic

          ``greenwave.decision.update``.

      :type msg: :class:`GreenwaveDecisionUpdate`
@@ -68,7 +67,7 @@ 

          )

          return

  

-     build = get_corresponding_module_build(db_session, module_build_nvr)

+     build = get_corresponding_module_build(module_build_nvr)

  

      if build is None:

          log.debug(

@@ -15,6 +15,7 @@ 

      record_filtered_rpms,

      record_module_build_arches

  )

+ from module_build_service.db_session import db_session

  from module_build_service.errors import UnprocessableEntity, Forbidden, ValidationError

  from module_build_service.utils.greenwave import greenwave

  from module_build_service.scheduler.default_modules import (
@@ -39,7 +40,7 @@ 

      return os.path.basename(srpm_path).replace(".src.rpm", "")

  

  

- def failed(config, db_session, msg):

+ def failed(config, msg):

      """

      Called whenever a module enters the 'failed' state.

  
@@ -96,7 +97,7 @@ 

      module_build_service.builder.GenericBuilder.clear_cache(build)

  

  

- def done(config, db_session, msg):

+ def done(config, msg):

      """Called whenever a module enters the 'done' state.

  

      We currently don't do anything useful, so moving to ready.
@@ -128,7 +129,7 @@ 

      module_build_service.builder.GenericBuilder.clear_cache(build)

  

  

- def init(config, db_session, msg):

+ def init(config, msg):

      """ Called whenever a module enters the 'init' state."""

      # Sleep for a few seconds to make sure the module in the database is committed

      # TODO: Remove this once messaging is implemented in SQLAlchemy hooks
@@ -154,20 +155,20 @@ 

      failure_reason = "unspec"

      try:

          mmd = build.mmd()

-         record_module_build_arches(mmd, build, db_session)

+         record_module_build_arches(mmd, build)

          arches = [arch.name for arch in build.arches]

-         defaults_added = add_default_modules(db_session, mmd)

+         defaults_added = add_default_modules(mmd)

  

          # Format the modulemd by putting in defaults and replacing streams that

          # are branches with commit hashes

          format_mmd(mmd, build.scmurl, build, db_session)

-         record_component_builds(db_session, mmd, build)

+         record_component_builds(mmd, build)

  

          # The ursine.handle_stream_collision_modules is Koji specific.

          # It is also run only when Ursa Prime is not enabled for the base

          # module (`if not defaults_added`).

          if conf.system in ["koji", "test"] and not defaults_added:

-             handle_stream_collision_modules(db_session, mmd)

+             handle_stream_collision_modules(mmd)

  

          # Sets xmd["mbs"]["ursine_rpms"] with RPMs from the buildrequired base modules which

          # conflict with the RPMs from other buildrequired modules. This is done to prefer modular
@@ -182,7 +183,7 @@ 

                  ", ".join(conf.base_module_names)

              )

  

-         mmd = record_filtered_rpms(db_session, mmd)

+         mmd = record_filtered_rpms(mmd)

          build.modulemd = mmd_to_str(mmd)

          build.transition(db_session, conf, models.BUILD_STATES["wait"])

      # Catch custom exceptions that we can expose to the user
@@ -212,6 +213,7 @@ 

                  state_reason=error_msg,

                  failure_type=failure_reason,

              )

+             db_session.commit()

  

  

  def generate_module_build_koji_tag(build):
@@ -239,10 +241,9 @@ 

  @module_build_service.utils.retry(

      interval=10, timeout=120, wait_on=(ValueError, RuntimeError, ConnectionError)

  )

- def get_module_build_dependencies(db_session, build):

+ def get_module_build_dependencies(build):

      """Used by wait handler to get module's build dependencies

  

-     :param db_session: SQLAlchemy session object.

      :param build: a module build.

      :type build: :class:`ModuleBuild`

      :return: the value returned from :meth:`get_module_build_dependencies`
@@ -296,7 +297,7 @@ 

          return conf.koji_cg_default_build_tag

  

  

- def wait(config, db_session, msg):

+ def wait(config, msg):

      """ Called whenever a module enters the 'wait' state.

  

      We transition to this state shortly after a modulebuild is first requested.
@@ -331,7 +332,7 @@ 

          pass

  

      try:

-         build_deps = get_module_build_dependencies(db_session, build)

+         build_deps = get_module_build_dependencies(build)

      except ValueError:

          reason = "Failed to get module info from MBS. Max retries reached."

          log.exception(reason)
@@ -387,7 +388,7 @@ 

  

      # If all components in module build will be reused, we don't have to build

      # module-build-macros, because there won't be any build done.

-     if attempt_to_reuse_all_components(builder, db_session, build):

+     if attempt_to_reuse_all_components(builder, build):

          log.info("All components have been reused for module %r, skipping build" % build)

          build.transition(db_session, config, state=models.BUILD_STATES["build"])

          db_session.add(build)

@@ -7,11 +7,12 @@ 

  from datetime import datetime

  from module_build_service import models, log

  from module_build_service.utils import start_next_batch_build

+ from module_build_service.db_session import db_session

  

  logging.basicConfig(level=logging.DEBUG)

  

  

- def done(config, db_session, msg):

+ def done(config, msg):

      """ Called whenever koji rebuilds a repo, any repo. """

  

      # First, find our ModuleBuild associated with this repo, if any.
@@ -111,7 +112,7 @@ 

  

          # Try to start next batch build, because there are still unbuilt

          # components in a module.

-         further_work += start_next_batch_build(config, module_build, db_session, builder)

+         further_work += start_next_batch_build(config, module_build, builder)

  

      else:

          if has_failed_components:

@@ -6,11 +6,12 @@ 

  import logging

  import koji

  from module_build_service import models, log, messaging

+ from module_build_service.db_session import db_session

  

  logging.basicConfig(level=logging.DEBUG)

  

  

- def tagged(config, db_session, msg):

+ def tagged(config, msg):

      """ Called whenever koji tags a build to tag. """

      if config.system not in ("koji", "test"):

          return []

@@ -17,31 +17,34 @@ 

  from module_build_service.builder import GenericBuilder

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  from module_build_service.utils.greenwave import greenwave

+ from module_build_service.db_session import db_session

  

  

  class MBSProducer(PollingProducer):

      frequency = timedelta(seconds=conf.polling_interval)

  

      def poll(self):

-         with models.make_db_session(conf) as db_session:

-             try:

-                 self.log_summary(db_session)

-                 self.process_waiting_module_builds(db_session)

-                 self.process_open_component_builds(db_session)

-                 self.fail_lost_builds(db_session)

-                 self.process_paused_module_builds(conf, db_session)

-                 self.retrigger_new_repo_on_failure(conf, db_session)

-                 self.delete_old_koji_targets(conf, db_session)

-                 self.cleanup_stale_failed_builds(conf, db_session)

-                 self.sync_koji_build_tags(conf, db_session)

-                 self.poll_greenwave(conf, db_session)

-             except Exception:

-                 msg = "Error in poller execution:"

-                 log.exception(msg)

+         try:

+             self.log_summary()

+             self.process_waiting_module_builds()

+             self.process_open_component_builds()

+             self.fail_lost_builds()

+             self.process_paused_module_builds(conf)

+             self.retrigger_new_repo_on_failure(conf)

+             self.delete_old_koji_targets(conf)

+             self.cleanup_stale_failed_builds(conf)

+             self.sync_koji_build_tags(conf)

+             self.poll_greenwave(conf)

+         except Exception:

+             msg = "Error in poller execution:"

+             log.exception(msg)

+ 

+         # Poller runs in its own thread. Database session can be removed safely.

+         db_session.remove()

  

          log.info('Poller will now sleep for "{}" seconds'.format(conf.polling_interval))

  

-     def fail_lost_builds(self, db_session):

+     def fail_lost_builds(self):

          # This function is supposed to be handling only the part which can't be

          # updated through messaging (e.g. srpm-build failures). Please keep it

          # fit `n` slim. We do want rest to be processed elsewhere
@@ -120,7 +123,7 @@ 

          elif conf.system == "mock":

              pass

  

-     def cleanup_stale_failed_builds(self, conf, db_session):

+     def cleanup_stale_failed_builds(self, conf):

          """ Does various clean up tasks on stale failed module builds

          :param conf: the MBS configuration object

          :param db_session: a SQLAlchemy database session
@@ -170,7 +173,7 @@ 

                  db_session.add(module)

                  db_session.commit()

  

-     def log_summary(self, db_session):

+     def log_summary(self):

          log.info("Current status:")

          consumer = module_build_service.scheduler.consumer.get_global_consumer()

          backlog = consumer.incoming.qsize()
@@ -189,7 +192,7 @@ 

                          n = len([c for c in module_build.component_builds if c.batch == i])

                          log.info("      * {0} components in batch {1}".format(n, i))

  

-     def _nudge_module_builds_in_state(self, db_session, state_name, older_than_minutes):

+     def _nudge_module_builds_in_state(self, state_name, older_than_minutes):

          """

          Finds all the module builds in the `state` with `time_modified` older

          than `older_than_minutes` and adds fake MBSModule message to the
@@ -218,16 +221,16 @@ 

  

          db_session.commit()

  

-     def process_waiting_module_builds(self, db_session):

+     def process_waiting_module_builds(self):

          for state in ["init", "wait"]:

-             self._nudge_module_builds_in_state(db_session, state, 10)

+             self._nudge_module_builds_in_state(state, 10)

  

-     def process_open_component_builds(self, db_session):

+     def process_open_component_builds(self):

          log.warning("process_open_component_builds is not yet implemented...")

  

-     def process_paused_module_builds(self, config, db_session):

+     def process_paused_module_builds(self, config):

          log.info("Looking for paused module builds in the build state")

-         if module_build_service.utils.at_concurrent_component_threshold(config, db_session):

+         if module_build_service.utils.at_concurrent_component_threshold(config):

              log.debug(

                  "Will not attempt to start paused module builds due to "

                  "the concurrent build threshold being met"
@@ -262,7 +265,7 @@ 

                  if _has_missed_new_repo_message(module_build, builder.koji_session):

                      log.info("  Processing the paused module build %r", module_build)

                      further_work = module_build_service.utils.start_next_batch_build(

-                         config, module_build, db_session, builder)

+                         config, module_build, builder)

                      for event in further_work:

                          log.info("  Scheduling faked event %r" % event)

                          module_build_service.scheduler.consumer.work_queue_put(event)
@@ -271,7 +274,7 @@ 

              if module_build_service.utils.at_concurrent_component_threshold(config, db_session):

                  break

  

-     def retrigger_new_repo_on_failure(self, config, db_session):

+     def retrigger_new_repo_on_failure(self, config):

          """

          Retrigger failed new repo tasks for module builds in the build state.

  
@@ -302,7 +305,7 @@ 

  

          db_session.commit()

  

-     def delete_old_koji_targets(self, config, db_session):

+     def delete_old_koji_targets(self, config):

          """

          Deletes targets older than `config.koji_target_delete_time` seconds

          from Koji to cleanup after the module builds.
@@ -344,7 +347,7 @@ 

                  log.info("Removing target of module %r", module)

                  koji_session.deleteBuildTarget(target["id"])

  

-     def cancel_stuck_module_builds(self, config, db_session):

+     def cancel_stuck_module_builds(self, config):

          """

          Method transitions builds which are stuck in one state too long to the "failed" state.

          The states are defined with the "cleanup_stuck_builds_states" config option and the
@@ -392,7 +395,7 @@ 

              )

              db_session.commit()

  

-     def sync_koji_build_tags(self, config, db_session):

+     def sync_koji_build_tags(self, config):

          """

          Method checking the "tagged" and "tagged_in_final" attributes of

          "complete" ComponentBuilds in the current batch of module builds
@@ -448,7 +451,7 @@ 

                      log.info("  Scheduling faked event %r" % msg)

                      module_build_service.scheduler.consumer.work_queue_put(msg)

  

-     def poll_greenwave(self, config, db_session):

+     def poll_greenwave(self, config):

          """

          Polls Greenwave for all builds in done state

          :param db_session: SQLAlchemy DB session

@@ -5,15 +5,15 @@ 

  

  from module_build_service import conf, log, models

  import module_build_service.messaging

+ from module_build_service.db_session import db_session

  from .reuse import get_reusable_components, reuse_component

  

  

- def at_concurrent_component_threshold(config, db_session):

+ def at_concurrent_component_threshold(config):

      """

      Determines if the number of concurrent component builds has reached

      the configured threshold

      :param config: Module Build Service configuration object

-     :param db_session: SQLAlchemy database session

      :return: boolean representing if there are too many concurrent builds at

      this time

      """
@@ -79,7 +79,7 @@ 

          return

  

  

- def continue_batch_build(config, module, db_session, builder, components=None):

+ def continue_batch_build(config, module, builder, components=None):

      """

      Continues building current batch. Submits next components in the batch

      until it hits concurrent builds limit.
@@ -124,7 +124,7 @@ 

          if c.is_completed:

              continue

          # Check the concurrent build threshold.

-         if at_concurrent_component_threshold(config, db_session):

+         if at_concurrent_component_threshold(config):

              log.info("Concurrent build threshold met")

              break

  
@@ -152,7 +152,7 @@ 

      return further_work

  

  

- def start_next_batch_build(config, module, db_session, builder, components=None):

+ def start_next_batch_build(config, module, builder, components=None):

      """

      Tries to start the build of next batch. In case there are still unbuilt

      components in a batch, tries to submit more components until it hits
@@ -173,7 +173,7 @@ 

      # the new one. If there is, continue building current batch.

      if any(c.is_waiting_for_build for c in current_batch):

          log.info("Continuing building batch %d", module.batch)

-         return continue_batch_build(config, module, db_session, builder, components)

+         return continue_batch_build(config, module, builder, components)

  

      # Check that there are no components in BUILDING state in current batch.

      # If there are, wait until they are built.
@@ -243,7 +243,7 @@ 

      # the new one. This can happen when resubmitting the failed module build.

      if not unbuilt_components and not components:

          log.info("Skipping build of batch %d, no component to build.", module.batch)

-         return start_next_batch_build(config, module, db_session, builder)

+         return start_next_batch_build(config, module, builder)

  

      log.info("Starting build of next batch %d, %s" % (module.batch, unbuilt_components))

  
@@ -260,7 +260,7 @@ 

          should_try_reuse = all_reused_in_prev_batch or prev_batch == 1

      if should_try_reuse:

          component_names = [c.package for c in unbuilt_components]

-         reusable_components = get_reusable_components(db_session, module, component_names)

+         reusable_components = get_reusable_components(module, component_names)

          for c, reusable_c in zip(unbuilt_components, reusable_components):

              if reusable_c:

                  components_reused = True
@@ -281,4 +281,4 @@ 

          return further_work

  

      return further_work + continue_batch_build(

-         config, module, db_session, builder, unbuilt_components_after_reuse)

+         config, module, builder, unbuilt_components_after_reuse)

@@ -14,6 +14,7 @@ 

  from gi.repository.GLib import Error as ModuleMDError

  

  from module_build_service import conf, log, models, Modulemd

+ from module_build_service.db_session import db_session

  from module_build_service.errors import ValidationError, ProgrammingError, UnprocessableEntity

  

  
@@ -529,11 +530,10 @@ 

      return build, msgs

  

  

- def import_fake_base_module(db_session, nsvc):

+ def import_fake_base_module(nsvc):

      """

      Creates and imports new fake base module to be used with offline local builds.

  

-     :param db_session: SQLAlchemy session object.

      :param str nsvc: name:stream:version:context of a module.

      """

      name, stream, version, context = nsvc.split(":")
@@ -579,14 +579,13 @@ 

      return dnf_base.conf.releasever

  

  

- def import_builds_from_local_dnf_repos(db_session, platform_id=None):

+ def import_builds_from_local_dnf_repos(platform_id=None):

      """

      Imports the module builds from all available local repositories to MBS DB.

  

      This is used when building modules locally without any access to MBS infra.

      This method also generates and imports the base module according to /etc/os-release.

  

-     :param db_session: SQLAlchemy session object.

      :param str platform_id: The `name:stream` of a fake platform module to generate in this

          method. When not set, the /etc/os-release is parsed to get the PLATFORM_ID.

      """
@@ -635,14 +634,13 @@ 

      # Create the fake platform:stream:1:000000 module to fulfill the

      # dependencies for local offline build and also to define the

      # srpm-buildroot and buildroot.

-     import_fake_base_module(db_session, "%s:1:000000" % platform_id)

+     import_fake_base_module("%s:1:000000" % platform_id)

  

  

- def get_build_arches(db_session, mmd, config):

+ def get_build_arches(mmd, config):

      """

      Returns the list of architectures for which the module `mmd` should be built.

  

-     :param db_session: SQLAlchemy session object.

      :param mmd: Module MetaData

      :param config: config (module_build_service.config.Config instance)

      :return list of architectures

@@ -4,6 +4,7 @@ 

  

  import module_build_service.messaging

  from module_build_service import log, models, conf

+ from module_build_service.db_session import db_session

  from module_build_service.utils.mse import get_base_module_mmds

  from module_build_service.resolver import GenericResolver

  
@@ -13,6 +14,10 @@ 

      Reuses component build `previous_component_build` instead of building

      component `component`

  

+     Please remember to commit the changes where the function is called.

+     This allows callers to reuse multiple component builds and commit them all

+     at once.

+ 

      Returns the list of BaseMessage instances to be handled later by the

      scheduler.

      """
@@ -54,14 +59,13 @@ 

      ]

  

  

- def get_reusable_module(db_session, module):

+ def get_reusable_module(module):

      """

      Returns previous module build of the module `module` in case it can be

      used as a source module to get the components to reuse from.

  

      In case there is no such module, returns None.

  

-     :param db_session: SQLAlchemy database session

      :param module: the ModuleBuild object of module being built.

      :return: ModuleBuild object which can be used for component reuse.

      """
@@ -164,7 +168,7 @@ 

      return previous_module_build

  

  

- def attempt_to_reuse_all_components(builder, db_session, module):

+ def attempt_to_reuse_all_components(builder, module):

      """

      Tries to reuse all the components in a build. The components are also

      tagged to the tags using the `builder`.
@@ -173,7 +177,7 @@ 

      False is returned, no component has been reused.

      """

  

-     previous_module_build = get_reusable_module(db_session, module)

+     previous_module_build = get_reusable_module(module)

      if not previous_module_build:

          return False

  
@@ -189,7 +193,6 @@ 

          if c.package == "module-build-macros":

              continue

          component_to_reuse = get_reusable_component(

-             db_session,

              module,

              c.package,

              previous_module_build=previous_module_build,
@@ -221,7 +224,7 @@ 

      return True

  

  

- def get_reusable_components(db_session, module, component_names, previous_module_build=None):

+ def get_reusable_components(module, component_names, previous_module_build=None):

      """

      Returns the list of ComponentBuild instances belonging to previous module

      build which can be reused in the build of module `module`.
@@ -232,7 +235,6 @@ 

      In case some component cannot be reused, None is used instead of a

      ComponentBuild instance in the returned list.

  

-     :param db_session: SQLAlchemy database session

      :param module: the ModuleBuild object of module being built.

      :param component_names: List of component names to be reused.

      :kwarg previous_module_build: the ModuleBuild instance of a module build
@@ -246,7 +248,7 @@ 

          return [None] * len(component_names)

  

      if not previous_module_build:

-         previous_module_build = get_reusable_module(db_session, module)

+         previous_module_build = get_reusable_module(module)

      if not previous_module_build:

          return [None] * len(component_names)

  
@@ -257,19 +259,19 @@ 

      for component_name in component_names:

          ret.append(

              get_reusable_component(

-                 db_session, module, component_name, previous_module_build, mmd, old_mmd)

+                 module, component_name, previous_module_build, mmd, old_mmd)

          )

  

      return ret
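
A short sketch tying the reuse helpers together with the commit-in-the-caller note added to reuse_component's docstring above; `module_build` stands for a ModuleBuild being processed and is an assumption for illustration:

    from module_build_service.db_session import db_session

    component_names = [c.package for c in module_build.component_builds]
    reusable = get_reusable_components(module_build, component_names)
    # The returned list lines up with component_names; None marks a component
    # that cannot be reused and has to be rebuilt.
    further_work = []
    for component, previous in zip(module_build.component_builds, reusable):
        if previous is not None:
            # Stages DB changes and returns BaseMessage instances.
            further_work += reuse_component(component, previous)
    # One commit for the whole batch, as reuse_component's docstring asks for.
    db_session.commit()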

  

  

  def get_reusable_component(

-     db_session, module, component_name, previous_module_build=None, mmd=None, old_mmd=None

+     module, component_name, previous_module_build=None, mmd=None, old_mmd=None

  ):

      """

      Returns the component (RPM) build of a module that can be reused

      instead of needing to rebuild it

-     :param db_session: SQLAlchemy database session

+ 

      :param module: the ModuleBuild object of module being built with a formatted

          mmd

      :param component_name: the name of the component (RPM) that you'd like to
@@ -303,7 +305,7 @@ 

          return None

  

      if not previous_module_build:

-         previous_module_build = get_reusable_module(db_session, module)

+         previous_module_build = get_reusable_module(module)

          if not previous_module_build:

              message = ("Cannot reuse because no previous build of "

                         "module {module_name} found!").format(

@@ -17,6 +17,7 @@ 

  

  import module_build_service.scm

  from module_build_service import conf, log, models, Modulemd

+ from module_build_service.db_session import db_session

  from module_build_service.errors import ValidationError, UnprocessableEntity, Forbidden, Conflict

  from module_build_service.utils import (

      to_text_type, deps_to_dict, mmd_to_str, load_mmd, load_mmd_file,
@@ -24,29 +25,25 @@ 

  )

  

  

- def record_module_build_arches(mmd, build, db_session):

+ def record_module_build_arches(mmd, build):

      """

      Finds out the list of build arches against which the ModuleBuild `build` should be built

      and records them to `build.arches`.

  

      :param Modulemd mmd: The MMD file associated with a ModuleBuild.

      :param ModuleBuild build: The ModuleBuild.

-     :param db_session: Database session.

      """

-     arches = get_build_arches(db_session, mmd, conf)

+     arches = get_build_arches(mmd, conf)

      for arch in arches:

          arch_obj = db_session.query(models.ModuleArch).filter_by(name=arch).first()

          if not arch_obj:

              arch_obj = models.ModuleArch(name=arch)

-             db_session.add(arch_obj)

-             db_session.commit()

+         build.arches.append(arch_obj)

  

-         if arch_obj not in build.arches:

-             build.arches.append(arch_obj)

-             db_session.add(build)

+     db_session.commit()

  

  

- def record_filtered_rpms(db_session, mmd):

+ def record_filtered_rpms(mmd):

      """Record filtered RPMs that should not be installed into buildroot

  

      These RPMs are filtered:
@@ -54,7 +51,6 @@ 

      * Reads the mmd["xmd"]["buildrequires"] and extends it with "filtered_rpms"

        list containing the NVRs of filtered RPMs in a buildrequired module.

  

-     :param db_session: SQLAlchemy session object.

      :param Modulemd mmd: Modulemd that will be built next.

      :rtype: Modulemd.Module

      :return: Modulemd extended with the "filtered_rpms" in XMD section.
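
For illustration, the XMD layout described above ends up looking roughly like this; the buildrequired module name and the NVRs are examples only, and the nesting follows the docstring wording:

    xmd = mmd.get_xmd()
    xmd["buildrequires"]["platform"]["filtered_rpms"] = [
        "baz-devel-0:0.1-6.fc28",
        "baz-doc-0:0.1-6.fc28",
    ]
    mmd.set_xmd(xmd)
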
@@ -395,7 +391,7 @@ 

  

  

  def record_component_builds(

-     db_session, mmd, module, initial_batch=1, previous_buildorder=None, main_mmd=None

+     mmd, module, initial_batch=1, previous_buildorder=None, main_mmd=None

  ):

      # Imported here to allow import of utils in GenericBuilder.

      import module_build_service.builder
@@ -466,7 +462,7 @@ 

              included_mmd = fetch_mmd(full_url, whitelist_url=True)[0]

              format_mmd(included_mmd, module.scmurl, module, db_session)

              batch = record_component_builds(

-                 db_session, included_mmd, module, batch, previous_buildorder, main_mmd)

+                 included_mmd, module, batch, previous_buildorder, main_mmd)

              continue

  

          package = component.get_name()
@@ -1120,12 +1116,11 @@ 

      return mmd, scm

  

  

- def load_local_builds(db_session, local_build_nsvs):

+ def load_local_builds(local_build_nsvs):

      """

      Loads previously finished local module builds from conf.mock_resultsdir

      and imports them into the database.

  

-     :param db_session: SQLAlchemy session object.

      :param local_build_nsvs: List of NSV separated by ':' defining the modules

          to load from the mock_resultsdir.

      """

@@ -3,6 +3,7 @@ 

  import re

  

  from module_build_service import conf, log

+ from module_build_service.db_session import db_session

  from module_build_service.resolver import GenericResolver

  

  
@@ -86,7 +87,7 @@ 

      ]

  

  

- def get_modulemds_from_ursine_content(db_session, tag):

+ def get_modulemds_from_ursine_content(tag):

      """Get all modules metadata which were added to ursine content

  

      Ursine content is the tag inheritance managed by Ursa-Major by adding
@@ -103,7 +104,6 @@ 

      So, this function finds all module koji_tags from the build tag

      and returns the corresponding module metadata.

  

-     :param db_session: SQLAlchemy database session.

      :param str tag: a base module's koji_tag.

      :return: list of module metadata. Empty list will be returned if no ursine

          modules metadata is found.
@@ -131,12 +131,11 @@ 

      return modulemds

  

  

- def find_stream_collision_modules(db_session, buildrequired_modules, koji_tag):

+ def find_stream_collision_modules(buildrequired_modules, koji_tag):

      """

      Find buildrequired modules that are part of the ursine content represented

      by the koji_tag but with a different stream.

  

-     :param db_session: SQLAlchemy database session.

      :param dict buildrequired_modules: a mapping of buildrequires, which is just

          the ``xmd/mbs/buildrequires``. This mapping is used to determine if a module

          found from ursine content is a buildrequire with different stream.
@@ -147,7 +146,7 @@ 

          found, an empty list is returned.

      :rtype: list[str]

      """

-     ursine_modulemds = get_modulemds_from_ursine_content(db_session, koji_tag)

+     ursine_modulemds = get_modulemds_from_ursine_content(koji_tag)

      if not ursine_modulemds:

          log.debug("No module metadata is found from ursine content.")

          return []
@@ -176,7 +175,7 @@ 

      return collision_modules

  

  

- def handle_stream_collision_modules(db_session, mmd):

+ def handle_stream_collision_modules(mmd):

      """

      Find modules from the ursine content and record those that are buildrequired

      modules but have a different stream. Finally, record the built RPMs of these
@@ -195,7 +194,6 @@ 

      which is a list of NSVC strings. Each of them is the module added to ursine

      content by Ursa-Major.

  

-     :param db_session: SQLAlchemy database session.

      :param mmd: a module's metadata which will be built.

      :type mmd: Modulemd.Module

      """
@@ -228,13 +226,13 @@ 

              continue

  

          modules_nsvc = find_stream_collision_modules(

-             db_session, buildrequires, base_module_info["koji_tag"])

+             buildrequires, base_module_info["koji_tag"])

  

          if modules_nsvc:

              # Save modules NSVC for later use in subsequent event handlers to

              # log readable messages.

              base_module_info["stream_collision_modules"] = modules_nsvc

-             base_module_info["ursine_rpms"] = find_module_built_rpms(db_session, modules_nsvc)

+             base_module_info["ursine_rpms"] = find_module_built_rpms(modules_nsvc)

          else:

              log.info("No stream collision module is found against base module %s.", module_name)

              # Always set in order to mark it as handled already.
@@ -244,10 +242,9 @@ 

      mmd.set_xmd(xmd)
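
The data recorded above lands under each base module's entry in xmd/mbs/buildrequires. A rough illustration with made-up NSVC and NVR values (only the two key names come from the code in this hunk):

    xmd = mmd.get_xmd()
    base_module_info = xmd["mbs"]["buildrequires"]["platform"]
    base_module_info["stream_collision_modules"] = ["foo:1:20180205135154:c1"]
    base_module_info["ursine_rpms"] = ["foo-bar-1.0-1.module+c1.x86_64"]
    mmd.set_xmd(xmd)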

  

  

- def find_module_built_rpms(db_session, modules_nsvc):

+ def find_module_built_rpms(modules_nsvc):

      """Find out built RPMs of given modules

  

-     :param db_session: SQLAlchemy database session.

      :param modules_nsvc: a list of modules' NSVC to find out built RPMs for

          each of them.

      :type modules_nsvc: list[str]

file modified
+14 -2
@@ -6,10 +6,11 @@ 

  

  import json

  import module_build_service.auth

- from flask import request, url_for

+ from flask import request, url_for, Blueprint, Response

  from flask.views import MethodView

  from six import string_types

  from io import BytesIO

+ from prometheus_client import generate_latest, CONTENT_TYPE_LATEST

  

  from module_build_service import app, conf, log, models, db, version, api_version as max_api_version

  from module_build_service.utils import (
@@ -27,7 +28,7 @@ 

  )

  from module_build_service.errors import ValidationError, Forbidden, NotFound, ProgrammingError

  from module_build_service.backports import jsonify

- from module_build_service.monitor import monitor_api

+ from module_build_service.monitor import registry

  

  

  api_routes = {
@@ -504,6 +505,17 @@ 

      return data

  

  

+ monitor_api = Blueprint(

+     "monitor", __name__, url_prefix="/module-build-service/<int:api_version>/monitor")

+ 

+ 

+ @cors_header()

+ @validate_api_version()

+ @monitor_api.route("/metrics")

+ def metrics(api_version):

+     return Response(generate_latest(registry), content_type=CONTENT_TYPE_LATEST)

+ 

+ 

  def register_api():

      """ Registers the MBS API. """

      module_view = ModuleBuildAPI.as_view("module_builds")
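
Once the monitor_api blueprint above is registered on the application, the endpoint can be smoke-tested along these lines; host, port and API version 1 are assumptions for illustration:

    import requests

    resp = requests.get(
        "http://localhost:5000/module-build-service/1/monitor/metrics")
    assert resp.status_code == 200
    # generate_latest() produces the Prometheus text exposition format.
    assert resp.headers["Content-Type"].startswith("text/plain")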

file modified
+17 -13
@@ -17,9 +17,10 @@ 

  from module_build_service.config import init_config

  from module_build_service.models import (

      ModuleBuild, ModuleArch, ComponentBuild, VirtualStream,

-     make_db_session, BUILD_STATES,

+     BUILD_STATES,

  )

  from module_build_service import Modulemd

+ from module_build_service.db_session import db_session

  

  

  base_dir = os.path.dirname(__file__)
@@ -101,10 +102,14 @@ 

      Please note that this function relies on database objects managed by

      Flask-SQLAlchemy.

      """

-     # Ensure all pending transactions are committed and do not block subsequent

-     # DML on tables.

-     # TODO: Should the code be fixed that forget to commit?

-     db.session.commit()

+ 

+     # Helpful when a test made changes through the database session but did

+     # not commit or roll them back.

+     #

+     # clean_database is usually called before a test runs, so there is no

+     # point in keeping changes a previous test left in the transaction.

+     db_session.remove()

+ 

      db.drop_all()

      db.create_all()
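
The db_session.remove() call above relies on standard SQLAlchemy scoped_session behavior: it closes the current thread-local session, discarding anything left uncommitted, and the next use lazily opens a fresh one. A standalone illustration using nothing beyond SQLAlchemy:

    from sqlalchemy import create_engine, text
    from sqlalchemy.orm import scoped_session, sessionmaker

    db_session = scoped_session(sessionmaker(bind=create_engine("sqlite://")))
    db_session.execute(text("SELECT 1"))  # lazily opens the thread-local session
    db_session.remove()                   # closes it; uncommitted work is gone
    db_session.execute(text("SELECT 1"))  # a brand-new session starts here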

  
@@ -130,6 +135,7 @@ 

          the generated base module streams.

      """

      clean_database()

+ 

      if multiple_stream_versions:

          if multiple_stream_versions is True:

              multiple_stream_versions = ["f28.0.0", "f29.0.0", "f29.1.0", "f29.2.0"]
@@ -147,11 +153,11 @@ 

              # Just to possibly confuse tests by adding another base module.

              mmd = mmd.copy("bootstrap", stream)

              import_mmd(db.session, mmd)

-     with make_db_session(conf) as db_session:

-         _populate_data(db_session, data_size, contexts=contexts, scratch=scratch)

+ 

+     _populate_data(data_size, contexts=contexts, scratch=scratch)

  

  

- def _populate_data(db_session, data_size=10, contexts=False, scratch=False):

+ def _populate_data(data_size=10, contexts=False, scratch=False):

      # Query arch from passed database session, otherwise there will be an error

      # like "Object '<ModuleBuild at 0x7f4ccc805c50>' is already attached to

      # session '275' (this is '276')" when adding a new module build object to the passed
@@ -326,7 +332,7 @@ 

          db_session.commit()

  

  

- def scheduler_init_data(db_session, tangerine_state=None, scratch=False):

+ def scheduler_init_data(tangerine_state=None, scratch=False):

      """ Creates a testmodule in the building state with all the components in the same batch

      """

      clean_database()
@@ -435,7 +441,6 @@ 

      base_module=None,

      filtered_rpms=None,

      xmd=None,

-     db_session=None,

      store_to_db=False,

      virtual_streams=None,

      arches=None,
@@ -459,7 +464,6 @@ 

      :param dict xmd: a mapping representing XMD section in module metadata. A

          custom xmd could be passed for testing a particular scenario and some

          default key/value pairs are added if not present.

-     :param db_session: SQLAlchemy database session.

      :param bool store_to_db: whether to store created module metadata to the

          database. If set to True, ``db_session`` is required.

      :param virtual_streams: List of virtual streams provided by this module.
@@ -479,9 +483,9 @@ 

      if base_module:

          assert db_session is not None

      if virtual_streams:

-         assert db_session and store_to_db

+         assert store_to_db

      if arches:

-         assert db_session and store_to_db

+         assert store_to_db

  

      name, stream, version, context = nsvc.split(":")

      mmd = Modulemd.ModuleStreamV2.new(name, stream)

file modified
+5 -11
@@ -8,8 +8,8 @@ 

  

  import module_build_service

  

- from module_build_service import conf

- from module_build_service.models import make_db_session, BUILD_STATES

+ from module_build_service.models import BUILD_STATES

+ from module_build_service.db_session import db_session

  from module_build_service.utils.general import mmd_to_str, load_mmd, get_rpm_release

  from tests import clean_database, read_staged_data, module_build_from_modulemd

  
@@ -48,13 +48,7 @@ 

  

  

  @pytest.fixture()

- def db_session():

-     with make_db_session(conf) as db_session:

-         yield db_session

- 

- 

- @pytest.fixture()

- def model_tests_init_data(db_session):

+ def model_tests_init_data():

      """Initialize data for model tests

  

      This is refactored from tests/test_models/__init__.py, which was able to be
@@ -78,7 +72,7 @@ 

  

  

  @pytest.fixture()

- def reuse_component_init_data(db_session):

+ def reuse_component_init_data():

      clean_database()

  

      mmd = load_mmd(read_staged_data("formatted_testmodule"))
@@ -265,7 +259,7 @@ 

  

  

  @pytest.fixture()

- def reuse_shared_userspace_init_data(db_session):

+ def reuse_shared_userspace_init_data():

      # Create shared-userspace-570, state is COMPLETE, all components

      # are properly built.

      scmurl = "https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453"

file modified
+78 -65
@@ -16,6 +16,7 @@ 

  import module_build_service.utils

  from module_build_service.errors import Forbidden

  from module_build_service import models, conf, build_logs

+ from module_build_service.db_session import db_session

  from module_build_service.scheduler import make_simple_stop_condition

  

  from mock import patch, PropertyMock, Mock, MagicMock
@@ -337,10 +338,10 @@ 

  

  class BaseTestBuild:

  

-     def run_scheduler(self, db_session, msgs=None, stop_condition=None):

+     def run_scheduler(self, msgs=None, stop_condition=None):

          module_build_service.scheduler.main(

              msgs or [],

-             stop_condition or make_simple_stop_condition(db_session)

+             stop_condition or make_simple_stop_condition()

          )
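
Several tests below pass a custom stop_condition and finish it with db_session.remove() so the scheduler thread does not keep holding the shared session. A minimal template of that pattern; the query and module_build_id are examples taken from the surrounding tests:

    def stop(message):
        module = db_session.query(models.ModuleBuild).get(module_build_id)
        done = module.state >= models.BUILD_STATES["done"]
        # Release the thread-local session before handing control back.
        db_session.remove()
        return done

    self.run_scheduler(stop_condition=stop)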

  

  
@@ -413,8 +414,7 @@ 

          cleanup_moksha()

          for i in range(20):

              try:

-                 with models.make_db_session(conf) as db_session:

-                     os.remove(build_logs.path(db_session, i))

+                 os.remove(build_logs.path(db_session, i))

              except Exception:

                  pass

  
@@ -422,7 +422,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, mmd_version, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, mmd_version

      ):

          """

          Tests the build of testmodule.yaml using FakeModuleBuilder which
@@ -474,7 +474,7 @@ 

  

          FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

  

-         self.run_scheduler(db_session)

+         self.run_scheduler()

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -501,7 +501,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_buildonly(

-         self, mocked_scm, mocked_get_user, mocked_get_session, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, mocked_get_session, conf_system, dbg, hmsc

      ):

          def list_build_rpms(nvr):

              assert nvr == "perl-List-Compare-1-1"
@@ -553,7 +553,7 @@ 

  

          FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

  

-         self.run_scheduler(db_session)

+         self.run_scheduler()

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -569,7 +569,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_no_components(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, gating_result, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, gating_result

      ):

          """

          Tests the build of a module with no components
@@ -581,6 +581,7 @@ 

              "python3-no-components.yaml",

              "620ec77321b2ea7b0d67d82992dda3e1d67055b4",

          )

+ 

          rv = self.client.post(

              "/module-build-service/1/module-builds/",

              data=json.dumps({
@@ -593,9 +594,10 @@ 

          data = json.loads(rv.data)

          module_build_id = data["id"]

  

-         self.run_scheduler(db_session)

+         self.run_scheduler()

  

          module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)

+ 

          # Make sure no component builds were registered

          assert len(module_build.component_builds) == 0

          # Make sure the build is done
@@ -665,7 +667,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_from_yaml_allowed(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          FakeSCM(

              mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")
@@ -687,7 +689,7 @@ 

              module_build_id = data["id"]

              assert module_build_id == 2

  

-         self.run_scheduler(db_session)

+         self.run_scheduler()

  

          module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)

          assert module_build.state == models.BUILD_STATES["ready"]
@@ -695,7 +697,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_cancel(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          """

          Submit all builds for a module and cancel the module build later.
@@ -739,7 +741,7 @@ 

          FakeModuleBuilder.on_cancel_cb = on_cancel_cb

          FakeModuleBuilder.on_finalize_cb = on_finalize_cb

  

-         self.run_scheduler(db_session)

+         self.run_scheduler()

  

          # Because we did not finish a single component build and canceled the

          # module build, all components and even the module itself should be in
@@ -757,7 +759,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_instant_complete(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          """

          Tests the build of testmodule.yaml using FakeModuleBuilder which
@@ -779,7 +781,7 @@ 

          module_build_id = data["id"]

          FakeModuleBuilder.INSTANT_COMPLETE = True

  

-         self.run_scheduler(db_session)

+         self.run_scheduler()

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -799,7 +801,7 @@ 

      )

      def test_submit_build_concurrent_threshold(

          self, conf_num_concurrent_builds, mocked_scm, mocked_get_user,

-         conf_system, dbg, hmsc, db_session

+         conf_system, dbg, hmsc

      ):

          """

          Tests the build of testmodule.yaml using FakeModuleBuilder with
@@ -825,16 +827,18 @@ 

              Stop the scheduler when the module is built or when we try to build

              more components than the num_concurrent_builds.

              """

-             main_stop = make_simple_stop_condition(db_session)

+             main_stop = make_simple_stop_condition()

              build_count = (

                  db_session.query(models.ComponentBuild).filter_by(

                      state=koji.BUILD_STATES["BUILDING"]

                  ).count()

              )

              over_threshold = conf.num_concurrent_builds < build_count

-             return main_stop(message) or over_threshold

+             result = main_stop(message) or over_threshold

+             db_session.remove()

+             return result

  

-         self.run_scheduler(db_session, stop_condition=stop)

+         self.run_scheduler(stop_condition=stop)

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -856,7 +860,7 @@ 

      )

      def test_try_to_reach_concurrent_threshold(

          self, conf_num_concurrent_builds, mocked_scm, mocked_get_user,

-         conf_system, dbg, hmsc, db_session

+         conf_system, dbg, hmsc

      ):

          """

          Tests that we try to submit new component build right after
@@ -887,7 +891,7 @@ 

              Stop the scheduler when the module is built or when we try to build

              more components than the num_concurrent_builds.

              """

-             main_stop = module_build_service.scheduler.make_simple_stop_condition(db_session)

+             main_stop = module_build_service.scheduler.make_simple_stop_condition()

              num_building = (

                  db_session.query(models.ComponentBuild)

                  .filter_by(state=koji.BUILD_STATES["BUILDING"])
@@ -895,9 +899,11 @@ 

              )

              over_threshold = conf.num_concurrent_builds < num_building

              TestBuild._global_var.append(num_building)

-             return main_stop(message) or over_threshold

+             result = main_stop(message) or over_threshold

+             db_session.remove()

+             return result

  

-         self.run_scheduler(db_session, stop_condition=stop)

+         self.run_scheduler(stop_condition=stop)

  

          # _global_var looks similar to this: [0, 1, 0, 0, 2, 2, 1, 0, 0, 0]

          # It shows the number of concurrent builds over time. At first we
@@ -921,7 +927,7 @@ 

      )

      def test_build_in_batch_fails(

          self, conf_num_concurrent_builds, mocked_scm, mocked_get_user,

-         conf_system, dbg, hmsc, db_session

+         conf_system, dbg, hmsc

      ):

          """

          Tests that if the build in batch fails, other components in a batch
@@ -959,7 +965,7 @@ 

  

          FakeModuleBuilder.on_tag_artifacts_cb = on_tag_artifacts_cb

  

-         self.run_scheduler(db_session)

+         self.run_scheduler()

  

          for c in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:

              # perl-Tangerine is expected to fail as configured in on_build_cb.
@@ -990,7 +996,7 @@ 

      )

      def test_all_builds_in_batch_fail(

          self, conf_num_concurrent_builds, mocked_scm, mocked_get_user,

-         conf_system, dbg, hmsc, db_session

+         conf_system, dbg, hmsc

      ):

          """

          Tests that if the build in batch fails, other components in a batch
@@ -1019,7 +1025,7 @@ 

  

          FakeModuleBuilder.on_build_cb = on_build_cb

  

-         self.run_scheduler(db_session)

+         self.run_scheduler()

  

          for c in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:

              # perl-Tangerine is expected to fail as configured in on_build_cb.
@@ -1044,7 +1050,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_reuse_all(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          """

          Tests that we do not try building module-build-macros when reusing all
@@ -1083,9 +1089,10 @@ 

  

          FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

  

+         from module_build_service.db_session import db_session

+ 

          # Create a dedicated database session for scheduler to avoid hang

-         with models.make_db_session(conf) as scheduler_db_session:

-             self.run_scheduler(scheduler_db_session, msgs=[MBSModule("local module build", 3, 1)])

+         self.run_scheduler(msgs=[MBSModule("local module build", 3, 1)])

  

          reused_component_ids = {

              "module-build-macros": None,
@@ -1108,7 +1115,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_reuse_all_without_build_macros(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          """

          Tests that we can reuse components even when the reused module does
@@ -1116,6 +1123,8 @@ 

          """

          # Firstly, remove all existing module-build-macros component builds

  

+         from module_build_service.db_session import db_session

+ 

          macros_cb_query = db_session.query(models.ComponentBuild).filter_by(

              package="module-build-macros")

          db_session.query(models.ComponentBuildTrace).filter(
@@ -1162,8 +1171,7 @@ 

  

          FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

  

-         with models.make_db_session(conf) as scheduler_db_session:

-             self.run_scheduler(scheduler_db_session, msgs=[MBSModule("local module build", 3, 1)])

+         self.run_scheduler(msgs=[MBSModule("local module build", 3, 1)])

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -1178,7 +1186,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_resume(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          """

          Tests that resuming the build works even when previous batches
@@ -1299,8 +1307,7 @@ 

          db_session.expire_all()

  

          # Run the backend

-         with models.make_db_session(conf) as scheduler_db_session:

-             self.run_scheduler(scheduler_db_session)

+         self.run_scheduler()

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -1314,7 +1321,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_resume_recover_orphaned_macros(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          """

          Tests that resuming the build works when module-build-macros is orphaned but marked as
@@ -1421,8 +1428,7 @@ 

          db_session.expire_all()

  

          # Run the backend

-         with models.make_db_session(conf) as scheduler_db_session:

-             self.run_scheduler(scheduler_db_session)

+         self.run_scheduler()

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -1436,7 +1442,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_resume_failed_init(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          """

          Tests that resuming the build works when the build failed during the init step
@@ -1455,11 +1461,12 @@ 

                  }),

              )

              # Run the backend so that it fails in the "init" handler

-             self.run_scheduler(db_session)

+             self.run_scheduler()

              cleanup_moksha()

  

          module_build_id = json.loads(rv.data)["id"]

          module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)

+         db_session.refresh(module_build)

          assert module_build.state == models.BUILD_STATES["failed"]

          assert module_build.state_reason == "Custom component repositories aren't allowed."

          assert len(module_build.module_builds_trace) == 2
@@ -1490,10 +1497,9 @@ 

          assert module_build.state_reason == "Resubmitted by Homer J. Simpson"

          # Make sure there are no components

          assert components == []

-         db_session.expire_all()

  

          # Run the backend again

-         self.run_scheduler(db_session)

+         self.run_scheduler()

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -1507,7 +1513,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_resume_init_fail(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          """

          Tests that resuming the build fails when the build is in init state
@@ -1525,7 +1531,7 @@ 

          )

          assert rv.status_code == 201

          # Run the backend

-         self.run_scheduler(db_session)

+         self.run_scheduler()

          # Post again and make sure it fails

          rv2 = self.client.post(

              "/module-build-service/1/module-builds/",
@@ -1554,7 +1560,7 @@ 

          return_value=True,

      )

      def test_submit_scratch_vs_normal(

-         self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          """

          Tests that submitting a scratch build with the same NSV as a previously
@@ -1578,7 +1584,7 @@ 

          # make sure normal build has expected context without a suffix

          assert module_build.context == "9c690d0e"

          # Run the backend

-         self.run_scheduler(db_session)

+         self.run_scheduler()

          # Post again as a scratch build and make sure it succeeds

          post_data["scratch"] = True

          rv2 = self.client.post(post_url, data=json.dumps(post_data))
@@ -1597,7 +1603,7 @@ 

          return_value=True,

      )

      def test_submit_normal_vs_scratch(

-         self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          """

          Tests that submitting a normal build with the same NSV as a previously
@@ -1622,7 +1628,7 @@ 

          # make sure scratch build has expected context with unique suffix

          assert module_build.context == "9c690d0e_1"

          # Run the backend

-         self.run_scheduler(db_session)

+         self.run_scheduler()

          # Post again as a non-scratch build and make sure it succeeds

          post_data["scratch"] = False

          rv2 = self.client.post(post_url, data=json.dumps(post_data))
@@ -1641,7 +1647,7 @@ 

          return_value=True,

      )

      def test_submit_scratch_vs_scratch(

-         self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          """

          Tests that submitting a scratch build with the same NSV as a previously
@@ -1665,7 +1671,7 @@ 

          # make sure first scratch build has expected context with unique suffix

          assert module_build.context == "9c690d0e_1"

          # Run the backend

-         self.run_scheduler(db_session)

+         self.run_scheduler()

          # Post scratch build again and make sure it succeeds

          rv2 = self.client.post(post_url, data=json.dumps(post_data))

          assert rv2.status_code == 201
@@ -1678,7 +1684,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_repo_regen_not_started_batch(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          """

          Tests that if MBS starts a new batch, the concurrent component threshold is met before a
@@ -1705,17 +1711,19 @@ 

              # Stop the backend if the module batch is 2 (where we simulate the concurrent threshold

              # being met). For safety, also stop the backend if the module erroneously completes.

              module = db_session.query(models.ModuleBuild).get(module_build_id)

-             return module.batch == 2 or module.state >= models.BUILD_STATES["done"]

+             result = module.batch == 2 or module.state >= models.BUILD_STATES["done"]

+             db_session.remove()

+             return result

  

          with patch(

              "module_build_service.utils.batches.at_concurrent_component_threshold"

          ) as mock_acct:

              # Once we get to batch 2, then simulate the concurrent threshold being met

-             def _at_concurrent_component_threshold(config, session):

-                 return session.query(models.ModuleBuild).get(module_build_id).batch == 2

+             def _at_concurrent_component_threshold(config):

+                 return db_session.query(models.ModuleBuild).get(module_build_id).batch == 2

  

              mock_acct.side_effect = _at_concurrent_component_threshold

-             self.run_scheduler(db_session, stop_condition=_stop_condition)

+             self.run_scheduler(stop_condition=_stop_condition)

  

          # Only module-build-macros should be built

          for build in (
@@ -1737,7 +1745,13 @@ 

          ]

          db_session.expire_all()

          # Stop after processing the seeded message

-         self.run_scheduler(db_session, msgs, lambda message: True)

+ 

+         def stop(message):

+             db_session.remove()

+             return True

+ 

+         self.run_scheduler(msgs, stop_condition=stop)

+ 

          # Make sure the module build didn't fail so that the poller can resume it later

          module = models.ModuleBuild.get_by_id(db_session, module_build_id)

          assert module.state == models.BUILD_STATES["build"]
@@ -1745,7 +1759,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_br_metadata_only_module(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

      ):

          """

          Test that when a build is submitted with a buildrequire without a Koji tag,
@@ -1779,7 +1793,7 @@ 

              assert set(dependencies.keys()) == {"module-f28-build"}

  

          FakeModuleBuilder.on_buildroot_add_repos_cb = on_buildroot_add_repos_cb

-         self.run_scheduler(db_session)

+         self.run_scheduler()

  

          module = models.ModuleBuild.get_by_id(db_session, module_build_id)

          assert module.state == models.BUILD_STATES["ready"]
@@ -1801,8 +1815,7 @@ 

          cleanup_moksha()

          for i in range(20):

              try:

-                 with models.make_db_session(conf) as db_session:

-                     os.remove(build_logs.path(db_session, i))

+                 os.remove(build_logs.path(db_session, i))

              except Exception:

                  pass

  
@@ -1815,12 +1828,12 @@ 

          return_value=staged_data_filename('local_builds'),

      )

      def test_submit_build_local_dependency(

-         self, resultsdir, mocked_scm, mocked_get_user, conf_system, hmsc, db_session

+         self, resultsdir, mocked_scm, mocked_get_user, conf_system, hmsc

      ):

          """

          Tests local module build dependency.

          """

-         module_build_service.utils.load_local_builds(db_session, ["platform"])

+         module_build_service.utils.load_local_builds(["platform"])

          FakeSCM(

              mocked_scm,

              "testmodule",
@@ -1844,7 +1857,7 @@ 

          # the local one and not the main one.

          FakeModuleBuilder.DEFAULT_GROUPS = {"srpm-build": {"bar"}, "build": {"foo"}}

  

-         self.run_scheduler(db_session)

+         self.run_scheduler()

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.

@@ -8,6 +8,7 @@ 

  

  from tests import init_data

  

+ from module_build_service.db_session import db_session

  from module_build_service.builder import GenericBuilder

  from mock import patch

  
@@ -18,7 +19,7 @@ 

  

      @patch("module_build_service.resolver.DBResolver")

      @patch("module_build_service.builder.base.GenericResolver")

-     def test_default_buildroot_groups_cache(self, generic_resolver, resolver, db_session):

+     def test_default_buildroot_groups_cache(self, generic_resolver, resolver):

          mbs_groups = {"buildroot": [], "srpm-buildroot": []}

  

          resolver = mock.MagicMock()

file modified
+44 -25
@@ -15,6 +15,7 @@ 

  import module_build_service.models

  import module_build_service.builder

  from module_build_service import Modulemd

+ from module_build_service.db_session import db_session

  from module_build_service.utils.general import mmd_to_str

  

  import pytest
@@ -116,7 +117,7 @@ 

              "/module-base-runtime-0.25-9/latest/x86_64"

          )

  

-     def test_recover_orphaned_artifact_when_tagged(self, db_session):

+     def test_recover_orphaned_artifact_when_tagged(self):

          """ Test recover_orphaned_artifact when the artifact is found and tagged in both tags

          """

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
@@ -144,6 +145,10 @@ 

          component_build.nvr = None

  

          actual = builder.recover_orphaned_artifact(component_build)

+         # recover_orphaned_artifact modifies a component build, but doesn't

+         # commit the changes.

+         db_session.commit()

+ 

          assert len(actual) == 3

          assert type(actual[0]) == module_build_service.messaging.KojiBuildChange

          assert actual[0].build_id == 91
@@ -164,7 +169,7 @@ 

          assert component_build.state_reason == "Found existing build"

          assert builder.koji_session.tagBuild.call_count == 0

  

-     def test_recover_orphaned_artifact_when_untagged(self, db_session):

+     def test_recover_orphaned_artifact_when_untagged(self):

          """ Tests recover_orphaned_artifact when the build is found but untagged

          """

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
@@ -194,8 +199,11 @@ 

          component_build.task_id = None

          component_build.nvr = None

          component_build.state = None

+         db_session.commit()

  

          actual = builder.recover_orphaned_artifact(component_build)

+         db_session.commit()

+ 

          assert len(actual) == 1

          assert type(actual[0]) == module_build_service.messaging.KojiBuildChange

          assert actual[0].build_id == 91
@@ -210,7 +218,7 @@ 

          assert component_build.state_reason == "Found existing build"

          builder.koji_session.tagBuild.assert_called_once_with(2, "foo-1.0-1.{0}".format(dist_tag))

  

-     def test_recover_orphaned_artifact_when_module_build_macros_untagged(self, db_session):

+     def test_recover_orphaned_artifact_when_module_build_macros_untagged(self):

          """ Tests recover_orphaned_artifact when module-build-macros is found but untagged

          """

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
@@ -245,8 +253,11 @@ 

          component_build.task_id = None

          component_build.nvr = None

          component_build.state = None

+         db_session.commit()

  

          actual = builder.recover_orphaned_artifact(component_build)

+         db_session.commit()

+ 

          assert len(actual) == 1

          assert type(actual[0]) == module_build_service.messaging.KojiBuildChange

          assert actual[0].build_id == 91
@@ -267,7 +278,7 @@ 

              [mock.call(2, "srpm-build", "module-build-macros"),

               mock.call(2, "build", "module-build-macros")])

  

-     def test_recover_orphaned_artifact_when_nothing_exists(self, db_session):

+     def test_recover_orphaned_artifact_when_nothing_exists(self):

          """ Test recover_orphaned_artifact when the build is not found

          """

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
@@ -294,14 +305,17 @@ 

          component_build.task_id = None

          component_build.nvr = None

          component_build.state = None

+         db_session.commit()

  

          actual = builder.recover_orphaned_artifact(component_build)

+         db_session.commit()

+ 

          assert actual == []

          # Make sure nothing erroneous gets tag

          assert builder.koji_session.tagBuild.call_count == 0

  

      @patch("koji.util")

-     def test_buildroot_ready(self, mocked_kojiutil, db_session):

+     def test_buildroot_ready(self, mocked_kojiutil):

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

  

          attrs = {"checkForBuilds.return_value": None, "checkForBuilds.side_effect": IOError}
@@ -321,7 +335,7 @@ 

          assert mocked_kojiutil.checkForBuilds.call_count == 3

  

      @pytest.mark.parametrize("blocklist", [False, True])

-     def test_tagging_already_tagged_artifacts(self, blocklist, db_session):

+     def test_tagging_already_tagged_artifacts(self, blocklist):

          """

          Tests that buildroot_add_artifacts and tag_artifacts do not try to

          tag already tagged artifacts
@@ -380,7 +394,7 @@ 

  

      @patch.object(FakeKojiModuleBuilder, "get_session")

      @patch.object(FakeKojiModuleBuilder, "_get_tagged_nvrs")

-     def test_untagged_artifacts(self, mock_get_tagged_nvrs, mock_get_session, db_session):

+     def test_untagged_artifacts(self, mock_get_tagged_nvrs, mock_get_session):

          """

          Tests that only tagged artifacts will be untagged

          """
@@ -526,9 +540,10 @@ 

      @pytest.mark.parametrize("custom_whitelist", [False, True])

      @pytest.mark.parametrize("repo_include_all", [False, True])

      def test_buildroot_connect(

-         self, custom_whitelist, blocklist, repo_include_all, db_session

+         self, custom_whitelist, blocklist, repo_include_all

      ):

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+         db_session.refresh(module_build)

  

          if blocklist:

              mmd = module_build.mmd()
@@ -566,6 +581,7 @@ 

              db_session.commit()

  

          module_build.arches.append(module_build_service.models.ModuleArch(name="i686"))

+         db_session.commit()

  

          builder = FakeKojiModuleBuilder(

              db_session=db_session,
@@ -613,7 +629,7 @@ 

          expected_calls = []

          assert session.packageListBlock.mock_calls == expected_calls

  

-         expected_arches = "x86_64 i686"

+         expected_arches = "i686 x86_64"

  

          expected_calls = [

              mock.call(
@@ -638,8 +654,9 @@ 

          assert session.editTag2.mock_calls == expected_calls

  

      @pytest.mark.parametrize("blocklist", [False, True])

-     def test_buildroot_connect_create_tag(self, blocklist, db_session):

+     def test_buildroot_connect_create_tag(self, blocklist):

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+         db_session.refresh(module_build)

  

          if blocklist:

              mmd = module_build.mmd()
@@ -675,7 +692,7 @@ 

          assert session.packageListBlock.mock_calls == expected_calls

  

      @pytest.mark.parametrize("scratch", [False, True])

-     def test_buildroot_connect_create_target(self, scratch, db_session):

+     def test_buildroot_connect_create_target(self, scratch):

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

  

          if scratch:
@@ -710,7 +727,7 @@ 

          assert session.createBuildTarget.mock_calls == expected_calls

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_built_rpms_in_module_build(self, ClientSession, db_session):

+     def test_get_built_rpms_in_module_build(self, ClientSession):

          session = ClientSession.return_value

          session.listTaggedRPMS.return_value = (

              [
@@ -802,7 +819,7 @@ 

      )

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_get_filtered_rpms_on_self_dep(

-         self, ClientSession, br_filtered_rpms, expected, db_session

+         self, ClientSession, br_filtered_rpms, expected

      ):

          session = ClientSession.return_value

          session.listTaggedRPMS.return_value = (
@@ -857,9 +874,12 @@ 

          "cg_enabled,cg_devel_enabled", [(False, False), (True, False), (True, True)]

      )

      @mock.patch("module_build_service.builder.KojiModuleBuilder.KojiContentGenerator")

-     def test_finalize(self, mock_koji_cg_cls, cg_enabled, cg_devel_enabled, db_session):

+     def test_finalize(self, mock_koji_cg_cls, cg_enabled, cg_devel_enabled):

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+         db_session.refresh(module_build)

          module_build.state = 2

+         db_session.commit()

+ 

          with patch(

              "module_build_service.config.Config.koji_enable_content_generator",

              new_callable=mock.PropertyMock,
@@ -899,14 +919,14 @@ 

  

      @patch.dict("sys.modules", krbV=MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_ensure_builder_use_a_logged_in_koji_session(self, ClientSession, db_session):

+     def test_ensure_builder_use_a_logged_in_koji_session(self, ClientSession):

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

          builder = KojiModuleBuilder(db_session, "owner", module_build, conf, "module-tag", [])

          builder.koji_session.krb_login.assert_called_once()

  

      @patch.dict("sys.modules", krbV=MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_module_build_arches(self, ClientSession, db_session):

+     def test_get_module_build_arches(self, ClientSession):

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

          arches = "x86_64 i686 ppc64le aarch64 s390x"

          session = ClientSession.return_value
@@ -961,29 +981,28 @@ 

  

      @patch("tempfile.mkdtemp")

      @patch("module_build_service.builder.KojiModuleBuilder.execute_cmd")

-     def _build_srpm(self, db_session, execute_cmd, mkdtemp):

+     def _build_srpm(self, execute_cmd, mkdtemp):

          module_build = make_module_in_db(

              "{name}:{stream}:{version}:{context}".format(**self.module_nsvc),

-             xmd=self.xmd,

-             db_session=db_session)

+             xmd=self.xmd)

  

          mkdtemp.return_value = self.tmp_srpm_build_dir

          return KojiModuleBuilder.get_disttag_srpm("disttag", module_build)

  

-     def test_return_srpm_file(self, db_session):

-         srpm_file = self._build_srpm(db_session)

+     def test_return_srpm_file(self):

+         srpm_file = self._build_srpm()

          assert self.expected_srpm_file == srpm_file

  

-     def test_filtered_rpms_are_added(self, db_session):

-         self._build_srpm(db_session)

+     def test_filtered_rpms_are_added(self):

+         self._build_srpm()

  

          with open(self.spec_file, "r") as f:

              content = f.read()

          for nevr in ["baz-devel-0:0.1-6.fc28", "baz-doc-0:0.1-6.fc28"]:

              assert KojiModuleBuilder.format_conflicts_line(nevr) + "\n" in content

  

-     def test_ursine_rpms_are_added(self, db_session):

-         self._build_srpm(db_session)

+     def test_ursine_rpms_are_added(self):

+         self._build_srpm()

  

          with open(self.spec_file, "r") as f:

              content = f.read()

@@ -10,6 +10,7 @@ 

  import kobo.rpmlib

  

  from module_build_service import conf

+ from module_build_service.db_session import db_session

  from module_build_service.models import ModuleBuild, ComponentBuild

  from module_build_service.builder.MockModuleBuilder import MockModuleBuilder

  from module_build_service.utils import import_fake_base_module, mmd_to_str, load_mmd
@@ -113,7 +114,7 @@ 

          return module

  

      @mock.patch("module_build_service.conf.system", new="mock")

-     def test_createrepo_filter_last_batch(self, db_session):

+     def test_createrepo_filter_last_batch(self):

          module = self._create_module_with_filters(db_session, 3, koji.BUILD_STATES["COMPLETE"])

  

          builder = MockModuleBuilder(
@@ -140,7 +141,7 @@ 

              assert "ed" not in rpm_names

  

      @mock.patch("module_build_service.conf.system", new="mock")

-     def test_createrepo_not_last_batch(self, db_session):

+     def test_createrepo_not_last_batch(self):

          module = self._create_module_with_filters(db_session, 2, koji.BUILD_STATES["COMPLETE"])

  

          builder = MockModuleBuilder(
@@ -165,7 +166,7 @@ 

              assert "ed" in rpm_names

  

      @mock.patch("module_build_service.conf.system", new="mock")

-     def test_createrepo_empty_rmp_list(self, db_session):

+     def test_createrepo_empty_rmp_list(self):

          module = self._create_module_with_filters(db_session, 3, koji.BUILD_STATES["COMPLETE"])

  

          builder = MockModuleBuilder(
@@ -199,17 +200,17 @@ 

          "module_build_service.builder.MockModuleBuilder.MockModuleBuilder._write_mock_config"

      )

      def test_buildroot_add_repos(

-         self, write_config, load_config, patched_open, base_module_repofiles, db_session

+         self, write_config, load_config, patched_open, base_module_repofiles

      ):

-         import_fake_base_module(db_session, "platform:f29:1:000000")

+         import_fake_base_module("platform:f29:1:000000")

  

          platform = ModuleBuild.get_last_build_in_stream(db_session, "platform", "f29")

          module_deps = [{

              "requires": {"platform": ["f29"]},

              "buildrequires": {"platform": ["f29"]},

          }]

-         foo = make_module_in_db("foo:1:1:1", module_deps, db_session=db_session)

-         app = make_module_in_db("app:1:1:1", module_deps, db_session=db_session)

+         foo = make_module_in_db("foo:1:1:1", module_deps)

+         app = make_module_in_db("app:1:1:1", module_deps)

  

          patched_open.side_effect = [

              mock.mock_open(read_data="[fake]\nrepofile 1\n").return_value,

@@ -10,6 +10,7 @@ 

  import module_build_service.messaging

  import module_build_service.scheduler.handlers.repos  # noqa

  from module_build_service import models, conf, build_logs, Modulemd

+ from module_build_service.db_session import db_session

  from module_build_service.utils.general import mmd_to_str

  

  from mock import patch, Mock, call, mock_open
@@ -34,7 +35,7 @@ 

  class TestBuild:

      def setup_method(self, test_method):

          init_data(1, contexts=True)

-         module = models.ModuleBuild.query.filter_by(id=2).one()

+         module = models.ModuleBuild.get_by_id(db_session, 2)

          module.cg_build_koji_tag = "f27-module-candidate"

          self.cg = KojiContentGenerator(module, conf)

  
@@ -50,8 +51,7 @@ 

  

          # Ensure that there is no build log from other tests

          try:

-             with models.make_db_session(conf) as db_session:

-                 file_path = build_logs.path(db_session, self.cg.module)

+             file_path = build_logs.path(db_session, self.cg.module)

              os.remove(file_path)

          except OSError:

              pass
@@ -68,8 +68,7 @@ 

          import moksha.hub.reactor  # noqa

  

          try:

-             with models.make_db_session(conf) as db_session:

-                 file_path = build_logs.path(db_session, self.cg.module)

+             file_path = build_logs.path(db_session, self.cg.module)

              os.remove(file_path)

          except OSError:

              pass
@@ -116,8 +115,7 @@ 

              expected_output = json.load(expected_output_file)

  

          # create the build.log

-         with models.make_db_session(conf) as db_session:

-             build_logs.start(db_session, self.cg.module)

+         build_logs.start(db_session, self.cg.module)

          build_logs.stop(self.cg.module)

  

          self.cg.devel = devel

file modified
+3 -2
@@ -6,6 +6,7 @@ 

  import tempfile

  

  from module_build_service import log, models

+ from module_build_service.db_session import db_session

  from module_build_service.logger import ModuleBuildLogs

  from module_build_service.scheduler.consumer import MBSConsumer

  from tests import init_data
@@ -39,7 +40,7 @@ 

          MBSConsumer.current_module_build_id = None

          shutil.rmtree(self.base)

  

-     def test_module_build_logs(self, db_session):

+     def test_module_build_logs(self):

          """

          Tests that ModuleBuildLogs is logging properly to build log file.

          """
@@ -98,7 +99,7 @@ 

              data = f.read()

              assert data.find("ignore this test msg3") == -1

  

-     def test_module_build_logs_name_format(self, db_session):

+     def test_module_build_logs_name_format(self):

          build = models.ModuleBuild.get_by_id(db_session, 2)

  

          log1 = ModuleBuildLogs("/some/path", "build-{id}.log")

file modified
+5 -4
@@ -4,6 +4,7 @@ 

  from mock import patch

  

  from module_build_service import app, models

+ from module_build_service.db_session import db_session

  from module_build_service.manage import manager_wrapper, retire

  from module_build_service.models import BUILD_STATES, ModuleBuild

  from module_build_service.utils.general import deps_to_dict
@@ -44,7 +45,7 @@ 

          ),

      )

      @patch("module_build_service.manage.prompt_bool")

-     def test_retire_build(self, prompt_bool, overrides, identifier, changed_count, db_session):

+     def test_retire_build(self, prompt_bool, overrides, identifier, changed_count):

          prompt_bool.return_value = True

  

          module_builds = db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all()
@@ -83,7 +84,7 @@ 

      )

      @patch("module_build_service.manage.prompt_bool")

      def test_retire_build_confirm_prompt(

-         self, prompt_bool, confirm_prompt, confirm_arg, confirm_expected, db_session

+         self, prompt_bool, confirm_prompt, confirm_arg, confirm_expected

      ):

          prompt_bool.return_value = confirm_prompt

  
@@ -149,7 +150,7 @@ 

              app.config["SQLALCHEMY_DATABASE_URI"] = original_db_uri

  

      @patch("module_build_service.scheduler.main")

-     def test_set_stream(self, main, db_session):

+     def test_set_stream(self, main):

          cli_cmd = [

              "mbs-manager", "build_module_locally",

              "--set-stream", "platform:f28",
@@ -193,7 +194,7 @@ 

          args, _ = logging.error.call_args_list[1]

          assert "Use '-s module_name:module_stream' to choose the stream" == args[0]

  

-     def test_module_build_failed(self, db_session):

+     def test_module_build_failed(self):

          cli_cmd = [

              "mbs-manager", "build_module_locally",

              "--set-stream", "platform:f28",

@@ -4,6 +4,7 @@ 

  

  from mock import patch

  from module_build_service import conf

+ from module_build_service.db_session import db_session

  from module_build_service.models import ComponentBuild, ComponentBuildTrace, ModuleBuild

  from module_build_service.utils.general import mmd_to_str, load_mmd

  from tests import init_data as init_data_contexts, clean_database, read_staged_data
@@ -13,7 +14,7 @@ 

  @pytest.mark.usefixtures("model_tests_init_data")

  class TestModels:

  

-     def test_app_sqlalchemy_events(self, db_session):

+     def test_app_sqlalchemy_events(self):

          component_build = ComponentBuild(

              package="before_models_committed",

              scmurl="git://pkgs.domain.local/rpms/before_models_committed?"
@@ -40,7 +41,7 @@ 

          assert component_builds_trace.state_reason is None

          assert component_builds_trace.task_id == 999999999

  

-     def test_context_functions(self, db_session):

+     def test_context_functions(self):

          """ Test that the build_context, runtime_context, and context hashes are correctly

          determined"""

          db_session.commit()
@@ -55,7 +56,7 @@ 

          assert build.context == "3ee22b28"

          assert build.build_context_no_bms == "089df24993c037e10174f3fa7342ab4dc191a4d4"

  

-     def test_siblings_property(self, db_session):

+     def test_siblings_property(self):

          """ Tests that the siblings property returns the ID of all modules with

          the same name:stream:version

          """
@@ -93,7 +94,7 @@ 

  

  

  class TestModelsGetStreamsContexts:

-     def test_get_last_build_in_all_streams(self, db_session):

+     def test_get_last_build_in_all_streams(self):

          init_data_contexts(contexts=True)

          builds = ModuleBuild.get_last_build_in_all_streams(db_session, "nginx")

          builds = sorted([
@@ -102,7 +103,7 @@ 

          db_session.commit()

          assert builds == ["nginx:%d:%d" % (i, i + 2) for i in range(10)]

  

-     def test_get_last_build_in_all_stream_last_version(self, db_session):

+     def test_get_last_build_in_all_stream_last_version(self):

          init_data_contexts(contexts=False)

          builds = ModuleBuild.get_last_build_in_all_streams(db_session, "nginx")

          builds = [
@@ -111,7 +112,7 @@ 

          db_session.commit()

          assert builds == ["nginx:1:11"]

  

-     def test_get_last_builds_in_stream(self, db_session):

+     def test_get_last_builds_in_stream(self):

          init_data_contexts(contexts=True)

          builds = ModuleBuild.get_last_builds_in_stream(db_session, "nginx", "1")

          builds = [
@@ -121,7 +122,7 @@ 

          db_session.commit()

          assert builds == ["nginx:1:3:d5a6c0fa", "nginx:1:3:795e97c1"]

  

-     def test_get_last_builds_in_stream_version_lte(self, db_session):

+     def test_get_last_builds_in_stream_version_lte(self):

          init_data_contexts(1, multiple_stream_versions=True)

          builds = ModuleBuild.get_last_builds_in_stream_version_lte(db_session, "platform", 290100)

          builds = {
@@ -131,7 +132,7 @@ 

          db_session.commit()

          assert builds == {"platform:f29.0.0:3:00000000", "platform:f29.1.0:3:00000000"}

  

-     def test_get_last_builds_in_stream_version_lte_different_versions(self, db_session):

+     def test_get_last_builds_in_stream_version_lte_different_versions(self):

          """

          Tests that get_last_builds_in_stream_version_lte works in case the

          name:stream_ver modules have different versions.
@@ -139,26 +140,19 @@ 

          clean_database(False)

  

          make_module_in_db(

-             "platform:f29.1.0:10:old_version",

-             db_session=db_session, virtual_streams=["f29"])

+             "platform:f29.1.0:10:old_version", virtual_streams=["f29"])

          make_module_in_db(

-             "platform:f29.1.0:15:c11.another",

-             db_session=db_session, virtual_streams=["f29"])

+             "platform:f29.1.0:15:c11.another", virtual_streams=["f29"])

          make_module_in_db(

-             "platform:f29.1.0:15:c11",

-             db_session=db_session, virtual_streams=["f29"])

+             "platform:f29.1.0:15:c11", virtual_streams=["f29"])

          make_module_in_db(

-             "platform:f29.2.0:0:old_version",

-             db_session=db_session, virtual_streams=["f29"])

+             "platform:f29.2.0:0:old_version", virtual_streams=["f29"])

          make_module_in_db(

-             "platform:f29.2.0:1:c11",

-             db_session=db_session, virtual_streams=["f29"])

+             "platform:f29.2.0:1:c11", virtual_streams=["f29"])

          make_module_in_db(

-             "platform:f29.3.0:15:old_version",

-             db_session=db_session, virtual_streams=["f29"])

+             "platform:f29.3.0:15:old_version", virtual_streams=["f29"])

          make_module_in_db(

-             "platform:f29.3.0:20:c11",

-             db_session=db_session, virtual_streams=["f29"])

+             "platform:f29.3.0:20:c11", virtual_streams=["f29"])

  

          builds = ModuleBuild.get_last_builds_in_stream_version_lte(

              db_session, "platform", 290200)
@@ -173,30 +167,26 @@ 

              "platform:f29.2.0:1:c11",

          }

  

-     def test_get_module_count(self, db_session):

+     def test_get_module_count(self):

          clean_database(False)

-         make_module_in_db("platform:f29.1.0:10:c11", db_session=db_session)

-         make_module_in_db("platform:f29.1.0:10:c12", db_session=db_session)

+         make_module_in_db("platform:f29.1.0:10:c11")

+         make_module_in_db("platform:f29.1.0:10:c12")

  

          count = ModuleBuild.get_module_count(db_session, name="platform")

          db_session.commit()

          assert count == 2

  

-     def test_add_virtual_streams_filter(self, db_session):

+     def test_add_virtual_streams_filter(self):

          clean_database(False)

  

          make_module_in_db(

-             "platform:f29.1.0:10:c1",

-             db_session=db_session, virtual_streams=["f29"])

+             "platform:f29.1.0:10:c1", virtual_streams=["f29"])

          make_module_in_db(

-             "platform:f29.1.0:15:c1",

-             db_session=db_session, virtual_streams=["f29"])

+             "platform:f29.1.0:15:c1", virtual_streams=["f29"])

          make_module_in_db(

-             "platform:f29.3.0:15:old_version",

-             db_session=db_session, virtual_streams=["f28", "f29"])

+             "platform:f29.3.0:15:old_version", virtual_streams=["f28", "f29"])

          make_module_in_db(

-             "platform:f29.3.0:20:c11",

-             db_session=db_session, virtual_streams=["f30"])

+             "platform:f29.3.0:20:c11", virtual_streams=["f30"])

  

          query = db_session.query(ModuleBuild).filter_by(name="platform")

          query = ModuleBuild._add_virtual_streams_filter(db_session, query, ["f28", "f29"])

file modified
+19 -12
@@ -7,6 +7,7 @@ 

  import module_build_service.config as mbs_config

  import module_build_service.monitor

  from module_build_service import models

+ from module_build_service.db_session import db_session

  from conf.config import TestConfiguration

  

  from six.moves import reload_module
@@ -49,14 +50,17 @@ 

  

  @mock.patch("module_build_service.monitor.builder_failed_counter.labels")

  @mock.patch("module_build_service.monitor.builder_success_counter.inc")

- def test_monitor_state_changing_success(succ_cnt, failed_cnt, db_session):

+ def test_monitor_state_changing_success(succ_cnt, failed_cnt):

      conf = mbs_config.Config(TestConfiguration)

      b = make_module_in_db(

-         "pkg:0.1:1:c1", [{

-             "requires": {"platform": ["el8"]},

-             "buildrequires": {"platform": ["el8"]},

-         }],

-         db_session=db_session)

+         "pkg:0.1:1:c1",

+         [

+             {

+                 "requires": {"platform": ["el8"]},

+                 "buildrequires": {"platform": ["el8"]},

+             }

+         ],

+     )

      b.transition(db_session, conf, models.BUILD_STATES["wait"])

      b.transition(db_session, conf, models.BUILD_STATES["build"])

      b.transition(db_session, conf, models.BUILD_STATES["done"])
@@ -67,15 +71,18 @@ 

  

  @mock.patch("module_build_service.monitor.builder_failed_counter.labels")

  @mock.patch("module_build_service.monitor.builder_success_counter.inc")

- def test_monitor_state_changing_failure(succ_cnt, failed_cnt, db_session):

+ def test_monitor_state_changing_failure(succ_cnt, failed_cnt):

      failure_type = "user"

      conf = mbs_config.Config(TestConfiguration)

      b = make_module_in_db(

-         "pkg:0.1:1:c1", [{

-             "requires": {"platform": ["el8"]},

-             "buildrequires": {"platform": ["el8"]},

-         }],

-         db_session=db_session)

+         "pkg:0.1:1:c1",

+         [

+             {

+                 "requires": {"platform": ["el8"]},

+                 "buildrequires": {"platform": ["el8"]},

+             }

+         ],

+     )

      b.transition(db_session, conf, models.BUILD_STATES["wait"])

      b.transition(db_session, conf, models.BUILD_STATES["build"])

      b.transition(db_session, conf, models.BUILD_STATES["failed"], failure_type=failure_type)

file modified
+16 -19
@@ -11,13 +11,14 @@ 

  from module_build_service.utils import import_mmd, mmd_to_str, load_mmd

  from module_build_service.models import ModuleBuild

  from module_build_service.errors import UnprocessableEntity

+ from module_build_service.db_session import db_session

  import tests

  

  

  @pytest.mark.usefixtures("reuse_component_init_data")

  class TestDBModule:

  

-     def test_get_buildrequired_modulemds(self, db_session):

+     def test_get_buildrequired_modulemds(self):

          mmd = load_mmd(tests.read_staged_data("platform"))

          mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")

  
@@ -52,9 +53,7 @@ 

          assert nsvcs == {"testmodule:master:20170109091357:123"}

  

      @pytest.mark.parametrize("stream_versions", [False, True])

-     def test_get_compatible_base_module_modulemds_stream_versions(

-         self, stream_versions, db_session

-     ):

+     def test_get_compatible_base_module_modulemds_stream_versions(self, stream_versions):

          tests.init_data(1, multiple_stream_versions=True)

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          platform = db_session.query(ModuleBuild).filter_by(name="platform", stream="f29.1.0").one()
@@ -73,7 +72,7 @@ 

              }

  

      @pytest.mark.parametrize("empty_buildrequires", [False, True])

-     def test_get_module_build_dependencies(self, empty_buildrequires, db_session):

+     def test_get_module_build_dependencies(self, empty_buildrequires):

          """

          Tests that the buildrequires of testmodule are returned

          """
@@ -95,7 +94,7 @@ 

              "testmodule", "master", "20170109091357", "78e4a6fd").keys()

          assert set(result) == expected

  

-     def test_get_module_build_dependencies_recursive(self, db_session):

+     def test_get_module_build_dependencies_recursive(self):

          """

          Tests that the buildrequires are returned when it is two layers deep

          """
@@ -136,13 +135,11 @@ 

          new_callable=PropertyMock,

          return_value=tests.staged_data_filename("local_builds"),

      )

-     def test_get_module_build_dependencies_recursive_requires(

-         self, resultdir, conf_system, db_session

-     ):

+     def test_get_module_build_dependencies_recursive_requires(self, resultdir, conf_system):

          """

          Tests that it returns the requires of the buildrequires recursively

          """

-         utils.load_local_builds(db_session, ["platform", "parent", "child", "testmodule"])

+         utils.load_local_builds(["platform", "parent", "child", "testmodule"])

  

          build = models.ModuleBuild.local_modules(db_session, "child", "master")

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
@@ -153,7 +150,7 @@ 

          expected = [os.path.join(local_path, "module-parent-master-20170816080815/results")]

          assert set(result) == set(expected)

  

-     def test_resolve_requires(self, db_session):

+     def test_resolve_requires(self):

          build = models.ModuleBuild.get_by_id(db_session, 2)

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          result = resolver.resolve_requires(
@@ -170,7 +167,7 @@ 

              }

          }

  

-     def test_resolve_requires_exception(self, db_session):

+     def test_resolve_requires_exception(self):

          build = models.ModuleBuild.get_by_id(db_session, 2)

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          with pytest.raises(UnprocessableEntity):
@@ -178,7 +175,7 @@ 

                  [":".join(["abcdefghi", build.stream, build.version, build.context])]

              )

  

-     def test_resolve_requires_siblings(self, db_session):

+     def test_resolve_requires_siblings(self):

          tests.clean_database()

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          mmd = load_mmd(tests.read_staged_data("formatted_testmodule"))
@@ -205,7 +202,7 @@ 

  

          db_session.commit()

  

-     def test_resolve_profiles(self, db_session):

+     def test_resolve_profiles(self):

          """

          Tests that the profiles get resolved recursively

          """
@@ -259,30 +256,30 @@ 

          new_callable=PropertyMock,

          return_value=tests.staged_data_filename("local_builds")

      )

-     def test_resolve_profiles_local_module(self, local_builds, conf_system, db_session):

+     def test_resolve_profiles_local_module(self, local_builds, conf_system):

          """

          Test that profiles get resolved recursively on local builds

          """

-         utils.load_local_builds(db_session, ["platform"])

+         utils.load_local_builds(["platform"])

          mmd = models.ModuleBuild.get_by_id(db_session, 2).mmd()

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

          result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))

          expected = {"buildroot": {"foo"}, "srpm-buildroot": {"bar"}}

          assert result == expected

  

-     def test_get_latest_with_virtual_stream(self, db_session):

+     def test_get_latest_with_virtual_stream(self):

          tests.init_data(1, multiple_stream_versions=True)

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          mmd = resolver.get_latest_with_virtual_stream("platform", "f29")

          assert mmd

          assert mmd.get_stream_name() == "f29.2.0"

  

-     def test_get_latest_with_virtual_stream_none(self, db_session):

+     def test_get_latest_with_virtual_stream_none(self):

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          mmd = resolver.get_latest_with_virtual_stream("platform", "doesnotexist")

          assert not mmd

  

-     def test_get_module_count(self, db_session):

+     def test_get_module_count(self):

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          count = resolver.get_module_count(name="platform", stream="f28")

          assert count == 1

@@ -5,15 +5,16 @@ 

  from datetime import datetime

  

  import module_build_service.resolver as mbs_resolver

- from module_build_service.utils.general import import_mmd, mmd_to_str, load_mmd

- from module_build_service.models import ModuleBuild, BUILD_STATES

  import tests

+ from module_build_service.db_session import db_session

+ from module_build_service.models import ModuleBuild, BUILD_STATES

+ from module_build_service.utils.general import import_mmd, mmd_to_str, load_mmd

  

  

  @pytest.mark.usefixtures("reuse_component_init_data")

  class TestLocalResolverModule:

  

-     def _create_test_modules(self, db_session, koji_tag_with_modules="foo-test"):

+     def _create_test_modules(self, koji_tag_with_modules="foo-test"):

          mmd = load_mmd(tests.read_staged_data("platform"))

          mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")

  
@@ -51,8 +52,8 @@ 

              db_session.add(build)

          db_session.commit()

  

-     def test_get_buildrequired_modulemds_fallback_to_db_resolver(self, db_session):

-         self._create_test_modules(db_session, koji_tag_with_modules=None)

+     def test_get_buildrequired_modulemds_fallback_to_db_resolver(self):

+         self._create_test_modules(koji_tag_with_modules=None)

          platform = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()

  

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")
@@ -64,7 +65,7 @@ 

              "testmodule:master:20170109091357:7c29193e"}

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_buildrequired_modulemds_name_not_tagged(self, ClientSession, db_session):

+     def test_get_buildrequired_modulemds_name_not_tagged(self, ClientSession):

          koji_session = ClientSession.return_value

          koji_session.getLastEvent.return_value = {"id": 123}

  
@@ -72,7 +73,7 @@ 

          koji_session.listTagged.return_value = []

          koji_session.multiCall.return_value = [[]]

  

-         self._create_test_modules(db_session)

+         self._create_test_modules()

          platform = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")

          result = resolver.get_buildrequired_modulemds("testmodule", "master", platform.mmd())
@@ -82,7 +83,7 @@ 

              "foo-test", inherit=True, package="testmodule", type="module", event=123)

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_buildrequired_modulemds_multiple_streams(self, ClientSession, db_session):

+     def test_get_buildrequired_modulemds_multiple_streams(self, ClientSession):

          koji_session = ClientSession.return_value

  

          # We will ask for testmodule:master, but there is also testmodule:2 in a tag.
@@ -99,7 +100,7 @@ 

          koji_session.multiCall.return_value = [

              [build] for build in koji_session.listTagged.return_value]

  

-         self._create_test_modules(db_session)

+         self._create_test_modules()

          platform = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")

          result = resolver.get_buildrequired_modulemds("testmodule", "master", platform.mmd())
@@ -108,7 +109,7 @@ 

          assert nsvcs == {"testmodule:master:20170109091357:7c29193d"}

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_buildrequired_modulemds_tagged_but_not_in_db(self, ClientSession, db_session):

+     def test_get_buildrequired_modulemds_tagged_but_not_in_db(self, ClientSession):

          koji_session = ClientSession.return_value

  

          # We will ask for testmodule:2, but it is not in database, so it should raise
@@ -126,7 +127,7 @@ 

          koji_session.multiCall.return_value = [

              [build] for build in koji_session.listTagged.return_value]

  

-         self._create_test_modules(db_session)

+         self._create_test_modules()

          platform = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")

          expected_error = ("Module testmodule:2:820181219174508:9edba152 is tagged in the "
@@ -135,8 +136,7 @@ 

              resolver.get_buildrequired_modulemds("testmodule", "2", platform.mmd())

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_buildrequired_modulemds_multiple_versions_contexts(

-             self, ClientSession, db_session):

+     def test_get_buildrequired_modulemds_multiple_versions_contexts(self, ClientSession):

          koji_session = ClientSession.return_value

  

          # We will ask for testmodule:2, but it is not in database, so it should raise
@@ -162,7 +162,7 @@ 

          koji_session.multiCall.return_value = [

              [build] for build in koji_session.listTagged.return_value]

  

-         self._create_test_modules(db_session)

+         self._create_test_modules()

          platform = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")

          result = resolver.get_buildrequired_modulemds("testmodule", "master", platform.mmd())
@@ -173,7 +173,7 @@ 

              "testmodule:master:20170109091357:7c29193e"}

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_buildrequired_modules(self, ClientSession, db_session):

+     def test_get_buildrequired_modules(self, ClientSession):

          koji_session = ClientSession.return_value

  

          # We will ask for testmodule:master, but there is also testmodule:2 in a tag.
@@ -190,7 +190,7 @@ 

          koji_session.multiCall.return_value = [

              [build] for build in koji_session.listTagged.return_value]

  

-         self._create_test_modules(db_session)

+         self._create_test_modules()

          platform = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")

          result = resolver.get_buildrequired_modules("testmodule", "master", platform.mmd())
@@ -199,7 +199,7 @@ 

          assert nvrs == {"testmodule-master-20170109091357.7c29193d"}

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_filter_inherited(self, ClientSession, db_session):

+     def test_filter_inherited(self, ClientSession):

          koji_session = ClientSession.return_value

  

          koji_session.getFullInheritance.return_value = [
@@ -230,7 +230,7 @@ 

              "testmodule-2-20180109091357.7c29193d"}

  

      @patch("module_build_service.builder.KojiModuleBuilder.koji_multicall_map")

-     def test_filter_based_on_real_stream_name(self, koji_multicall_map, db_session):

+     def test_filter_based_on_real_stream_name(self, koji_multicall_map):

          koji_session = MagicMock()

          koji_multicall_map.return_value = [

              {"build_id": 124, "extra": {"typeinfo": {"module": {"stream": "foo-test"}}}},
@@ -253,8 +253,7 @@ 

          assert build_ids == {124, 126, 127}

  

      @patch("module_build_service.builder.KojiModuleBuilder.koji_multicall_map")

-     def test_filter_based_on_real_stream_name_multicall_error(

-             self, koji_multicall_map, db_session):

+     def test_filter_based_on_real_stream_name_multicall_error(self, koji_multicall_map):

          koji_session = MagicMock()

          koji_multicall_map.return_value = None

  
@@ -267,7 +266,7 @@ 

          with pytest.raises(RuntimeError, match=expected_error):

              resolver._filter_based_on_real_stream_name(koji_session, builds, "foo-test")

  

-     def test_get_compatible_base_module_modulemds_fallback_to_dbresolver(self, db_session):

+     def test_get_compatible_base_module_modulemds_fallback_to_dbresolver(self):

          tests.init_data(1, multiple_stream_versions=True)

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")

          platform = db_session.query(ModuleBuild).filter_by(name="platform", stream="f29.1.0").one()
@@ -278,7 +277,7 @@ 

  

          assert len(result) == 2

  

-     def test_get_compatible_base_module_modulemds(self, db_session):

+     def test_get_compatible_base_module_modulemds(self):

          tests.init_data(1, multiple_stream_versions=True)

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")

  
@@ -288,6 +287,7 @@ 

          platform_xmd["mbs"]["koji_tag_with_modules"] = "module-f29-build"

          platform_mmd.set_xmd(platform_xmd)

          platform.modulemd = mmd_to_str(platform_mmd)

+         db_session.commit()

  

          result = resolver.get_compatible_base_module_modulemds(

              platform_mmd, stream_version_lte=True, virtual_streams=["f29"],

@@ -4,15 +4,16 @@ 

  from datetime import datetime

  

  import module_build_service.resolver as mbs_resolver

- from module_build_service.utils.general import import_mmd, mmd_to_str, load_mmd

+ from module_build_service.db_session import db_session

  from module_build_service.models import ModuleBuild

+ from module_build_service.utils.general import import_mmd, mmd_to_str, load_mmd

  import tests

  

  

  @pytest.mark.usefixtures("reuse_component_init_data")

  class TestLocalResolverModule:

  

-     def test_get_buildrequired_modulemds(self, db_session):

+     def test_get_buildrequired_modulemds(self):

          mmd = load_mmd(tests.read_staged_data("platform"))

          mmd = mmd.copy(mmd.get_module_name(), "f8")

          import_mmd(db_session, mmd)

file modified
+15 -14
@@ -4,6 +4,7 @@ 

  

  import module_build_service.resolver as mbs_resolver

  import module_build_service.utils

+ from module_build_service.db_session import db_session

  from module_build_service.utils.general import mmd_to_str

  import module_build_service.models

  import tests
@@ -11,7 +12,7 @@ 

  

  class TestMBSModule:

      @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_module_modulemds_nsvc(self, mock_session, testmodule_mmd_9c690d0e, db_session):

+     def test_get_module_modulemds_nsvc(self, mock_session, testmodule_mmd_9c690d0e):

          """ Tests for querying a module from mbs """

          mock_res = Mock()

          mock_res.ok.return_value = True
@@ -54,7 +55,7 @@ 

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

      def test_get_module_modulemds_partial(

-         self, mock_session, testmodule_mmd_9c690d0e, testmodule_mmd_c2c572ed, db_session

+         self, mock_session, testmodule_mmd_9c690d0e, testmodule_mmd_c2c572ed

      ):

          """ Test for querying MBS without the context of a module """

  
@@ -106,7 +107,7 @@ 

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

      def test_get_module_build_dependencies(

-         self, mock_session, platform_mmd, testmodule_mmd_9c690d0e, db_session

+         self, mock_session, platform_mmd, testmodule_mmd_9c690d0e

      ):

          """

          Tests that we return just direct build-time dependencies of testmodule.
@@ -183,7 +184,7 @@ 

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

      def test_get_module_build_dependencies_empty_buildrequires(

-         self, mock_session, testmodule_mmd_9c690d0e, db_session

+         self, mock_session, testmodule_mmd_9c690d0e

      ):

  

          mmd = module_build_service.utils.load_mmd(testmodule_mmd_9c690d0e)
@@ -237,7 +238,7 @@ 

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

      def test_resolve_profiles(

-         self, mock_session, formatted_testmodule_mmd, platform_mmd, db_session

+         self, mock_session, formatted_testmodule_mmd, platform_mmd

      ):

  

          mock_res = Mock()
@@ -323,11 +324,11 @@ 

          return_value=tests.staged_data_filename("local_builds")

      )

      def test_resolve_profiles_local_module(

-         self, local_builds, conf_system, formatted_testmodule_mmd, db_session

+         self, local_builds, conf_system, formatted_testmodule_mmd

      ):

          tests.clean_database()

          with tests.app.app_context():

-             module_build_service.utils.load_local_builds(db_session, ["platform"])

+             module_build_service.utils.load_local_builds(["platform"])

  

              resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

              result = resolver.resolve_profiles(
@@ -336,7 +337,7 @@ 

              assert result == expected

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_empty_buildrequired_modulemds(self, request_session, db_session):

+     def test_get_empty_buildrequired_modulemds(self, request_session):

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

          request_session.get.return_value = Mock(ok=True)

          request_session.get.return_value.json.return_value = {"items": [], "meta": {"next": None}}
@@ -346,7 +347,7 @@ 

          assert [] == result

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_buildrequired_modulemds(self, mock_session, db_session):

+     def test_get_buildrequired_modulemds(self, mock_session):

          resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

          mock_session.get.return_value = Mock(ok=True)

          mock_session.get.return_value.json.return_value = {
@@ -384,7 +385,7 @@ 

          assert "c1" == mmd.get_context()

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_module_count(self, mock_session, db_session):

+     def test_get_module_count(self, mock_session):

          mock_res = Mock()

          mock_res.ok.return_value = True

          mock_res.json.return_value = {
@@ -403,7 +404,7 @@ 

          )

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_latest_with_virtual_stream(self, mock_session, platform_mmd, db_session):

+     def test_get_latest_with_virtual_stream(self, mock_session, platform_mmd):

          mock_res = Mock()

          mock_res.ok.return_value = True

          mock_res.json.return_value = {
@@ -445,11 +446,11 @@ 

          return_value=tests.staged_data_filename("local_builds")

      )

      def test_get_buildrequired_modulemds_local_builds(

-         self, local_builds, conf_system, db_session

+         self, local_builds, conf_system

      ):

          tests.clean_database()

          with tests.app.app_context():

-             module_build_service.utils.load_local_builds(db_session, ["testmodule"])

+             module_build_service.utils.load_local_builds(["testmodule"])

  

              resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

              result = resolver.get_buildrequired_modulemds(
@@ -462,7 +463,7 @@ 

              assert "321" == mmd.get_context()

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_buildrequired_modulemds_kojiresolver(self, mock_session, db_session):

+     def test_get_buildrequired_modulemds_kojiresolver(self, mock_session):

          """

          Test that MBSResolver uses KojiResolver as input when KojiResolver is enabled for

          the base module.

@@ -73,7 +73,7 @@ 

          }

          consumer.consume(msg)

          assert process_message.call_count == 1

-         msg_obj = process_message.call_args[0][1]

+         msg_obj = process_message.call_args[0][0]

          assert isinstance(msg_obj, KojiRepoChange)

          assert msg_obj.msg_id == msg["body"]["msg_id"]

          assert msg_obj.repo_tag == msg["body"]["msg"]["tag"]

@@ -7,6 +7,7 @@ 

  from mock import call, Mock, patch, PropertyMock

  import pytest

  

+ from module_build_service.db_session import db_session

  from module_build_service.errors import UnprocessableEntity

  from module_build_service.models import ModuleBuild

  from module_build_service.scheduler import default_modules
@@ -16,7 +17,7 @@ 

  

  @patch("module_build_service.scheduler.default_modules.handle_collisions_with_base_module_rpms")

  @patch("module_build_service.scheduler.default_modules._get_default_modules")

- def test_add_default_modules(mock_get_dm, mock_hc, db_session):

+ def test_add_default_modules(mock_get_dm, mock_hc):

      """

      Test that default modules present in the database are added, and the others are ignored.

      """
@@ -39,8 +40,8 @@ 

      platform_mmd.set_xmd(platform_xmd)

      platform.modulemd = mmd_to_str(platform_mmd)

  

-     make_module_in_db("python:3:12345:1", base_module=platform, db_session=db_session)

-     make_module_in_db("nodejs:11:2345:2", base_module=platform, db_session=db_session)

+     make_module_in_db("python:3:12345:1", base_module=platform)

+     make_module_in_db("nodejs:11:2345:2", base_module=platform)

      db_session.commit()

  

      mock_get_dm.return_value = {
@@ -48,7 +49,7 @@ 

          "python": "3",

          "ruby": "2.6",

      }

-     defaults_added = default_modules.add_default_modules(db_session, mmd)

+     defaults_added = default_modules.add_default_modules(mmd)

      # Make sure that the default modules were added. ruby:2.6 will be ignored since it's not in

      # the database

      assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"nodejs", "platform", "python"}
@@ -61,19 +62,19 @@ 

  

  

  @patch("module_build_service.scheduler.default_modules._get_default_modules")

- def test_add_default_modules_not_linked(mock_get_dm, db_session):

+ def test_add_default_modules_not_linked(mock_get_dm):

      """

      Test that no default modules are added when they aren't linked from the base module.

      """

      clean_database()

      mmd = load_mmd(read_staged_data("formatted_testmodule.yaml"))

      assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"platform"}

-     default_modules.add_default_modules(db_session, mmd)

+     default_modules.add_default_modules(mmd)

      assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"platform"}

      mock_get_dm.assert_not_called()

  

  

- def test_add_default_modules_platform_not_available(db_session):

+ def test_add_default_modules_platform_not_available():

      """

      Test that an exception is raised when the platform module that is buildrequired is missing.

  
@@ -84,17 +85,17 @@ 

  

      expected_error = "Failed to retrieve the module platform:f28:3:00000000 from the database"

      with pytest.raises(RuntimeError, match=expected_error):

-         default_modules.add_default_modules(db_session, mmd)

+         default_modules.add_default_modules(mmd)

  

  

  @patch("module_build_service.scheduler.default_modules._get_default_modules")

- def test_add_default_modules_request_failed(mock_get_dm, db_session):

+ def test_add_default_modules_request_failed(mock_get_dm):

      """

      Test that an exception is raised when the call to _get_default_modules failed.

      """

      clean_database()

-     make_module_in_db("python:3:12345:1", db_session=db_session)

-     make_module_in_db("nodejs:11:2345:2", db_session=db_session)

+     make_module_in_db("python:3:12345:1")

+     make_module_in_db("nodejs:11:2345:2")

      mmd = load_mmd(read_staged_data("formatted_testmodule.yaml"))

      xmd_brs = mmd.get_xmd()["mbs"]["buildrequires"]

      assert set(xmd_brs.keys()) == {"platform"}
@@ -118,7 +119,7 @@ 

      mock_get_dm.side_effect = ValueError(expected_error)

  

      with pytest.raises(ValueError, match=expected_error):

-         default_modules.add_default_modules(db_session, mmd)

+         default_modules.add_default_modules(mmd)

  

  

  @pytest.mark.parametrize("is_rawhide", (True, False))

@@ -6,6 +6,7 @@ 

  from sqlalchemy import func

  

  from module_build_service import conf

+ from module_build_service.db_session import db_session

  from module_build_service.models import BUILD_STATES, ModuleBuild

  from module_build_service.scheduler.consumer import MBSConsumer

  from module_build_service.scheduler.handlers.greenwave import get_corresponding_module_build
@@ -20,10 +21,10 @@ 

          clean_database()

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_module_build_nvr_does_not_exist_in_koji(self, ClientSession, db_session):

+     def test_module_build_nvr_does_not_exist_in_koji(self, ClientSession):

          ClientSession.return_value.getBuild.return_value = None

  

-         assert get_corresponding_module_build(db_session, "n-v-r") is None

+         assert get_corresponding_module_build("n-v-r") is None

  

      @pytest.mark.parametrize(

          "build_info",
@@ -37,25 +38,23 @@ 

          ],

      )

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_cannot_find_module_build_id_from_build_info(

-         self, ClientSession, build_info, db_session

-     ):

+     def test_cannot_find_module_build_id_from_build_info(self, ClientSession, build_info):

          ClientSession.return_value.getBuild.return_value = build_info

  

-         assert get_corresponding_module_build(db_session, "n-v-r") is None

+         assert get_corresponding_module_build("n-v-r") is None

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_corresponding_module_build_id_does_not_exist_in_db(self, ClientSession, db_session):

+     def test_corresponding_module_build_id_does_not_exist_in_db(self, ClientSession):

          fake_module_build_id, = db_session.query(func.max(ModuleBuild.id)).first()

  

          ClientSession.return_value.getBuild.return_value = {

              "extra": {"typeinfo": {"module": {"module_build_service_id": fake_module_build_id + 1}}}

          }

  

-         assert get_corresponding_module_build(db_session, "n-v-r") is None

+         assert get_corresponding_module_build("n-v-r") is None

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_find_the_module_build(self, ClientSession, db_session):

+     def test_find_the_module_build(self, ClientSession):

          expected_module_build = (

              db_session.query(ModuleBuild).filter(ModuleBuild.name == "platform").first()

          )
@@ -64,7 +63,7 @@ 

              "extra": {"typeinfo": {"module": {"module_build_service_id": expected_module_build.id}}}

          }

  

-         build = get_corresponding_module_build(db_session, "n-v-r")

+         build = get_corresponding_module_build("n-v-r")

  

          assert expected_module_build.id == build.id

          assert expected_module_build.name == build.name
@@ -74,9 +73,9 @@ 

      """Test handler decision_update"""

  

      @patch("module_build_service.scheduler.handlers.greenwave.log")

-     def test_decision_context_is_not_match(self, log, db_session):

+     def test_decision_context_is_not_match(self, log):

          msg = Mock(msg_id="msg-id-1", decision_context="bodhi_update_push_testing")

-         decision_update(conf, db_session, msg)

+         decision_update(conf, msg)

          log.debug.assert_called_once_with(

              'Skip Greenwave message %s as MBS only handles messages with the decision context "%s"',

              "msg-id-1",
@@ -84,14 +83,14 @@ 

          )

  

      @patch("module_build_service.scheduler.handlers.greenwave.log")

-     def test_not_satisfy_policies(self, log, db_session):

+     def test_not_satisfy_policies(self, log):

          msg = Mock(

              msg_id="msg-id-1",

              decision_context="test_dec_context",

              policies_satisfied=False,

              subject_identifier="pkg-0.1-1.c1",

          )

-         decision_update(conf, db_session, msg)

+         decision_update(conf, msg)

          log.debug.assert_called_once_with(

              "Skip to handle module build %s because it has not satisfied Greenwave policies.",

              msg.subject_identifier,
@@ -99,16 +98,19 @@ 

  

      @patch("module_build_service.messaging.publish")

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_transform_from_done_to_ready(self, ClientSession, publish, db_session):

+     def test_transform_from_done_to_ready(self, ClientSession, publish):

          clean_database()

  

          # This build should be queried and transformed to ready state

          module_build = make_module_in_db(

-             "pkg:0.1:1:c1", [{

-                 "requires": {"platform": ["el8"]},

-                 "buildrequires": {"platform": ["el8"]},

-             }],

-             db_session=db_session)

+             "pkg:0.1:1:c1",

+             [

+                 {

+                     "requires": {"platform": ["el8"]},

+                     "buildrequires": {"platform": ["el8"]},

+                 }

+             ],

+         )

          module_build.transition(

              db_session, conf, BUILD_STATES["done"], "Move to done directly for running test."

          )
@@ -139,6 +141,7 @@ 

          consumer = MBSConsumer(hub)

          consumer.consume(msg)

  

+         db_session.add(module_build)

          # Load module build again to check its state is moved correctly

          db_session.refresh(module_build)

          assert BUILD_STATES["ready"] == module_build.state

@@ -9,7 +9,8 @@ 

  import module_build_service.messaging

  import module_build_service.scheduler.handlers.modules

  from module_build_service import build_logs

- from module_build_service.models import make_db_session, ModuleBuild

+ from module_build_service.db_session import db_session

+ from module_build_service.models import ModuleBuild

  from module_build_service.utils.general import mmd_to_str, load_mmd

  

  
@@ -22,14 +23,12 @@ 

          mmd = mmd.copy("testmodule", "1")

          scmurl = "git://pkgs.domain.local/modules/testmodule?#620ec77"

          clean_database()

-         with make_db_session(conf) as session:

-             ModuleBuild.create(

-                 session, conf, "testmodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl")

+         ModuleBuild.create(

+             db_session, conf, "testmodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl")

  

      def teardown_method(self, test_method):

          try:

-             with make_db_session(conf) as db_session:

-                 path = build_logs.path(db_session, 1)

+             path = build_logs.path(db_session, 1)

              os.remove(path)

          except Exception:

              pass
@@ -75,7 +74,7 @@ 

              msg_id=None, module_build_id=2, module_build_state="init"

          )

  

-         self.fn(config=conf, db_session=db_session, msg=msg)

+         self.fn(config=conf, msg=msg)

  

          build = ModuleBuild.get_by_id(db_session, 2)

          # Make sure the module entered the wait state
@@ -89,7 +88,7 @@ 

          hcwbmr.assert_called_once()

          return build

  

-     def test_init_called_twice(self, db_session):

+     def test_init_called_twice(self):

          build = self.init_basic(db_session)

          old_component_builds = len(build.component_builds)

          old_mmd = load_mmd(build.modulemd)
@@ -108,7 +107,7 @@ 

  

      @patch("module_build_service.scm.SCM")

      @patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])

-     def test_init_scm_not_available(self, get_build_arches, mocked_scm, db_session):

+     def test_init_scm_not_available(self, get_build_arches, mocked_scm):

          FakeSCM(

              mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4",

              get_latest_raise=True,
@@ -117,7 +116,7 @@ 

  

          msg = module_build_service.messaging.MBSModule(

              msg_id=None, module_build_id=2, module_build_state="init")

-         self.fn(config=conf, db_session=db_session, msg=msg)

+         self.fn(config=conf, msg=msg)

  

          build = ModuleBuild.get_by_id(db_session, 2)

          # Make sure the module entered the failed state
@@ -132,7 +131,7 @@ 

      @patch("module_build_service.scm.SCM")

      @patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])

      def test_init_includedmodule(

-         self, get_build_arches, mocked_scm, mocked_mod_allow_repo, db_session

+         self, get_build_arches, mocked_scm, mocked_mod_allow_repo

      ):

          FakeSCM(mocked_scm, "includedmodules", ["testmodule_init.yaml"])

          includedmodules_yml_path = read_staged_data("includedmodules")
@@ -144,7 +143,7 @@ 

              db_session, conf, "includemodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl")

          msg = module_build_service.messaging.MBSModule(

              msg_id=None, module_build_id=3, module_build_state="init")

-         self.fn(config=conf, db_session=db_session, msg=msg)

+         self.fn(config=conf, msg=msg)

          build = ModuleBuild.get_by_id(db_session, 3)

          assert build.state == 1

          assert build.name == "includemodule"
@@ -170,7 +169,7 @@ 

      @patch("module_build_service.scm.SCM")

      @patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])

      def test_init_when_get_latest_raises(

-             self, get_build_arches, mocked_scm, mocked_from_module_event, db_session):

+             self, get_build_arches, mocked_scm, mocked_from_module_event):

          FakeSCM(

              mocked_scm,

              "testmodule",
@@ -183,7 +182,7 @@ 

          build = ModuleBuild.get_by_id(db_session, 2)

          mocked_from_module_event.return_value = build

  

-         self.fn(config=conf, db_session=db_session, msg=msg)

+         self.fn(config=conf, msg=msg)

  

          # Query the database again to make sure the build object is updated

          db_session.refresh(build)

@@ -7,10 +7,10 @@ 

  import os

  import koji

  import pytest

- from tests import conf, scheduler_init_data, read_staged_data

+ from tests import conf, scheduler_init_data

  import module_build_service.resolver

  from module_build_service import build_logs, Modulemd

- from module_build_service.utils.general import load_mmd

+ from module_build_service.db_session import db_session

  from module_build_service.models import ComponentBuild, ModuleBuild

  

  base_dir = os.path.dirname(os.path.dirname(__file__))
@@ -18,50 +18,34 @@ 

  

  class TestModuleWait:

      def setup_method(self, test_method):

+         scheduler_init_data()

+ 

          self.config = conf

          self.session = mock.Mock()

          self.fn = module_build_service.scheduler.handlers.modules.wait

  

      def teardown_method(self, test_method):

          try:

-             with module_build_service.models.make_db_session(conf) as db_session:

-                 path = build_logs.path(db_session, 1)

+             path = build_logs.path(db_session, 1)

              os.remove(path)

          except Exception:

              pass

  

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

-     @patch("module_build_service.models.ModuleBuild.from_module_event")

-     def test_init_basic(self, from_module_event, create_builder):

+     def test_init_basic(self, create_builder):

          builder = mock.Mock()

          builder.get_disttag_srpm.return_value = "some srpm disttag"

          builder.build.return_value = 1234, 1, "", None

          builder.module_build_tag = {"name": "some-tag-build"}

          create_builder.return_value = builder

-         mocked_module_build = mock.Mock()

-         mocked_module_build.name = "foo"

-         mocked_module_build.stream = "stream"

-         mocked_module_build.version = "1"

-         mocked_module_build.context = "1234567"

-         mocked_module_build.state = 1

-         mocked_module_build.id = 1

-         mocked_module_build.json.return_value = {

-             "name": "foo",

-             "stream": "1",

-             "version": 1,

-             "state": "some state",

-             "id": 1,

-         }

-         mocked_module_build.id = 1

-         mocked_module_build.mmd.return_value = load_mmd(read_staged_data("formatted_testmodule"))

-         mocked_module_build.component_builds = []

- 

-         from_module_event.return_value = mocked_module_build

  

+         module_build_id = db_session.query(ModuleBuild).first().id

          msg = module_build_service.messaging.MBSModule(

-             msg_id=None, module_build_id=1, module_build_state="some state")

+             msg_id=None,

+             module_build_id=module_build_id,

+             module_build_state="some state")

          with patch("module_build_service.resolver.GenericResolver.create"):

-             self.fn(config=self.config, db_session=self.session, msg=msg)

+             self.fn(config=self.config, msg=msg)

  

      @patch(

          "module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -71,12 +55,11 @@ 

      @patch("module_build_service.resolver.DBResolver")

      @patch("module_build_service.resolver.GenericResolver")

      def test_new_repo_called_when_macros_reused(

-         self, generic_resolver, resolver, create_builder, dbg, db_session

+         self, generic_resolver, resolver, create_builder, dbg

      ):

          """

          Test that newRepo is called when module-build-macros build is reused.

          """

-         scheduler_init_data(db_session)

          koji_session = mock.MagicMock()

          koji_session.newRepo.return_value = 123456

  
@@ -101,7 +84,7 @@ 

              msg_id=None, module_build_id=2, module_build_state="some state")

  

          module_build_service.scheduler.handlers.modules.wait(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

  

          koji_session.newRepo.assert_called_once_with("module-123-build")

  
@@ -119,12 +102,11 @@ 

      @patch("module_build_service.resolver.DBResolver")

      @patch("module_build_service.resolver.GenericResolver")

      def test_new_repo_not_called_when_macros_not_reused(

-         self, generic_resolver, resolver, create_builder, dbg, db_session

+         self, generic_resolver, resolver, create_builder, dbg

      ):

          """

          Test that newRepo is called everytime for module-build-macros

          """

-         scheduler_init_data(db_session)

          koji_session = mock.MagicMock()

          koji_session.newRepo.return_value = 123456

  
@@ -149,7 +131,7 @@ 

              msg_id=None, module_build_id=2, module_build_state="some state")

  

          module_build_service.scheduler.handlers.modules.wait(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

  

          assert koji_session.newRepo.called

  
@@ -161,14 +143,13 @@ 

      @patch("module_build_service.resolver.DBResolver")

      @patch("module_build_service.resolver.GenericResolver")

      def test_set_cg_build_koji_tag_fallback_to_default(

-         self, generic_resolver, resolver, create_builder, dbg, db_session

+         self, generic_resolver, resolver, create_builder, dbg

      ):

          """

          Test that build.cg_build_koji_tag fallbacks to default tag.

          """

          base_mmd = Modulemd.ModuleStreamV2.new("base-runtime", "f27")

  

-         scheduler_init_data(db_session)

          koji_session = mock.MagicMock()

          koji_session.newRepo.return_value = 123456

  
@@ -196,7 +177,7 @@ 

              msg_id=None, module_build_id=2, module_build_state="some state")

  

          module_build_service.scheduler.handlers.modules.wait(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

  

          module_build = ModuleBuild.get_by_id(db_session, 2)

          assert module_build.cg_build_koji_tag == "modular-updates-candidate"
@@ -229,14 +210,12 @@ 

          dbg,

          koji_cg_tag_build,

          expected_cg_koji_build_tag,

-         db_session,

      ):

          """

          Test that build.cg_build_koji_tag is set.

          """

          base_mmd = Modulemd.ModuleStreamV2.new("base-runtime", "f27")

  

-         scheduler_init_data(db_session)

          koji_session = mock.MagicMock()

          koji_session.newRepo.return_value = 123456

  
@@ -269,7 +248,7 @@ 

                  msg_id=None, module_build_id=2, module_build_state="some state"

              )

              module_build_service.scheduler.handlers.modules.wait(

-                 config=conf, db_session=db_session, msg=msg

+                 config=conf, msg=msg

              )

              module_build = ModuleBuild.get_by_id(db_session, 2)

              assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag

@@ -9,6 +9,7 @@ 

  import koji

  from module_build_service.scheduler.producer import MBSProducer

  from module_build_service.messaging import KojiTagChange

+ from module_build_service.db_session import db_session

  import six.moves.queue as queue

  from datetime import datetime, timedelta

  
@@ -39,7 +40,7 @@ 

      @pytest.mark.parametrize("fresh", [True, False])

      @patch("module_build_service.utils.batches.start_build_component")

      def test_process_paused_module_builds(

-         self, start_build_component, create_builder, global_consumer, dbg, fresh, db_session

+         self, start_build_component, create_builder, global_consumer, dbg, fresh

      ):

          """

          Tests general use-case of process_paused_module_builds.
@@ -67,6 +68,9 @@ 

          poller = MBSProducer(hub)

          poller.poll()

  

+         # Poller removes database session after all tasks are done. So, this

+         # db_session is not that db_session working in poller.poll().

I don't understand this comment. Do you mean that you are essentially re-attaching the object to the session so it can be refreshed, because the poller removed the session?

I find the comment confusing because it implies that a different database session is used inside the poll method. In production that would be true, since the poller runs in a separate thread, but when run as a test, poll() runs in the same thread unless I'm missing something.

In reality, this database session was cleaned up (removed) at the end of the poll method, so module_build is now detached and you need to re-attach it to the session, or simply query it again, before refreshing the object. It might just be easier to understand if you did

module_build = models.ModuleBuild.get_by_id(db_session, 3)

instead of

db_session.add(module_build)
# Refresh our module_build object.
db_session.refresh(module_build)

Either way, the comment needs clarification.
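
For what it's worth, a minimal sketch of the re-query variant in the context of this test, assuming (as in the snippet above) that the paused build has id 3 and that poll() removes the scoped session when it finishes:

import mock

from module_build_service import models
from module_build_service.db_session import db_session
from module_build_service.scheduler.producer import MBSProducer


def check_paused_build_after_poll():
    # Run the poller the same way the test does; poll() removes the
    # scoped session when it is done, detaching any loaded objects.
    hub = mock.MagicMock()
    poller = MBSProducer(hub)
    poller.poll()

    # Re-query the build instead of re-adding the detached instance
    # and refreshing it; id 3 is the build the comment above refers to.
    module_build = models.ModuleBuild.get_by_id(db_session, 3)
    return module_build.state

This avoids the add-then-refresh dance on a stale object and makes it explicit that the test is loading fresh state after the poller cleaned up its session.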

+         db_session.add(module_build)

          # Refresh our module_build object.

          db_session.refresh(module_build)

  
@@ -93,7 +97,7 @@ 

      @patch("module_build_service.utils.batches.start_build_component")

      def test_process_paused_module_builds_with_new_repo_task(

          self, start_build_component, create_builder, global_consumer, dbg, task_state,

-         expect_start_build_component, db_session

+         expect_start_build_component

      ):

          """

          Tests general use-case of process_paused_module_builds.
@@ -122,6 +126,7 @@ 

          poller = MBSProducer(hub)

          poller.poll()

  

+         db_session.add(module_build)

          # Refresh our module_build object.

          db_session.refresh(module_build)

  
@@ -141,7 +146,7 @@ 

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_retrigger_new_repo_on_failure(

-         self, ClientSession, create_builder, global_consumer, dbg, db_session

+         self, ClientSession, create_builder, global_consumer, dbg

      ):

          """

          Tests that we call koji_session.newRepo when the newRepo task failed.
@@ -176,7 +181,7 @@ 

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_trigger_new_repo_when_succeeded(

-         self, ClientSession, create_builder, global_consumer, dbg, db_session

+         self, ClientSession, create_builder, global_consumer, dbg

      ):

          """

          Tests that we do not call koji_session.newRepo when the newRepo task
@@ -206,6 +211,7 @@ 

          poller = MBSProducer(hub)

          poller.poll()

  

+         db_session.add(module_build)

          # Refresh our module_build object.

          db_session.refresh(module_build)

  
@@ -213,7 +219,7 @@ 

          assert module_build.new_repo_task_id == 123456

  

      def test_process_paused_module_builds_waiting_for_repo(

-         self, create_builder, global_consumer, dbg, db_session

+         self, create_builder, global_consumer, dbg

      ):

          """

          Tests that process_paused_module_builds does not start new batch
@@ -238,6 +244,7 @@ 

          poller = MBSProducer(hub)

          poller.poll()

  

+         db_session.add(module_build)

          # Refresh our module_build object.

          db_session.refresh(module_build)

  
@@ -249,7 +256,7 @@ 

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_old_build_targets_are_not_associated_with_any_module_builds(

-         self, ClientSession, create_builder, global_consumer, dbg, db_session

+         self, ClientSession, create_builder, global_consumer, dbg

      ):

          consumer = mock.MagicMock()

          consumer.incoming = queue.Queue()
@@ -264,14 +271,14 @@ 

  

          hub = mock.MagicMock()

          poller = MBSProducer(hub)

-         poller.delete_old_koji_targets(conf, db_session)

+         poller.delete_old_koji_targets(conf)

  

          koji_session.deleteBuildTarget.assert_not_called()

  

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_dont_delete_base_module_build_target(

-         self, ClientSession, create_builder, global_consumer, dbg, db_session

+         self, ClientSession, create_builder, global_consumer, dbg

      ):

          module_build = models.ModuleBuild.get_by_id(db_session, 3)

  
@@ -289,14 +296,14 @@ 

  

              hub = mock.MagicMock()

              poller = MBSProducer(hub)

-             poller.delete_old_koji_targets(conf, db_session)

+             poller.delete_old_koji_targets(conf)

  

              koji_session.deleteBuildTarget.assert_not_called()

  

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_dont_delete_build_target_for_unfinished_module_builds(

-         self, ClientSession, create_builder, global_consumer, dbg, db_session

+         self, ClientSession, create_builder, global_consumer, dbg

      ):

          module_build = models.ModuleBuild.get_by_id(db_session, 3)

  
@@ -316,14 +323,14 @@ 

  

              hub = mock.MagicMock()

              poller = MBSProducer(hub)

-             poller.delete_old_koji_targets(conf, db_session)

+             poller.delete_old_koji_targets(conf)

  

              koji_session.deleteBuildTarget.assert_not_called()

  

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_only_delete_build_target_with_allowed_koji_tag_prefix(

-         self, ClientSession, create_builder, global_consumer, dbg, db_session

+         self, ClientSession, create_builder, global_consumer, dbg

      ):

          module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)

          # Only module build 1's build target should be deleted.
@@ -354,7 +361,7 @@ 

              with patch.object(conf, "koji_target_delete_time", new=60):

                  hub = mock.MagicMock()

                  poller = MBSProducer(hub)

-                 poller.delete_old_koji_targets(conf, db_session)

+                 poller.delete_old_koji_targets(conf)

  

              koji_session.deleteBuildTarget.assert_called_once_with(1)

              koji_session.krb_login.assert_called_once()
@@ -362,7 +369,7 @@ 

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_cant_delete_build_target_if_not_reach_delete_time(

-         self, ClientSession, create_builder, global_consumer, dbg, db_session

+         self, ClientSession, create_builder, global_consumer, dbg

      ):

          module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)

          # Only module build 1's build target should be deleted.
@@ -389,13 +396,13 @@ 

              # enough for test.

              hub = mock.MagicMock()

              poller = MBSProducer(hub)

-             poller.delete_old_koji_targets(conf, db_session)

+             poller.delete_old_koji_targets(conf)

  

              koji_session.deleteBuildTarget.assert_not_called()

  

      @pytest.mark.parametrize("state", ["init", "wait"])

      def test_process_waiting_module_build(

-         self, create_builder, global_consumer, dbg, state, db_session

+         self, create_builder, global_consumer, dbg, state

      ):

          """ Test that processing old waiting module builds works. """

  
@@ -420,7 +427,7 @@ 

          assert consumer.incoming.qsize() == 0

  

          # Poll :)

-         poller.process_waiting_module_builds(db_session)

+         poller.process_waiting_module_builds()

  

          assert consumer.incoming.qsize() == 1

  
@@ -430,7 +437,7 @@ 

  

      @pytest.mark.parametrize("state", ["init", "wait"])

      def test_process_waiting_module_build_not_old_enough(

-         self, create_builder, global_consumer, dbg, state, db_session

+         self, create_builder, global_consumer, dbg, state

      ):

          """ Test that we do not process young waiting builds. """

  
@@ -455,13 +462,13 @@ 

          assert consumer.incoming.qsize() == 0

  

          # Poll :)

-         poller.process_waiting_module_builds(db_session)

+         poller.process_waiting_module_builds()

  

          # Ensure we did *not* process the 9 minute-old build.

          assert consumer.incoming.qsize() == 0

  

      def test_process_waiting_module_build_none_found(

-         self, create_builder, global_consumer, dbg, db_session

+         self, create_builder, global_consumer, dbg

      ):

          """ Test nothing happens when no module builds are waiting. """

  
@@ -476,12 +483,12 @@ 

          assert consumer.incoming.qsize() == 0

  

          # Poll :)

-         poller.process_waiting_module_builds(db_session)

+         poller.process_waiting_module_builds()

  

          # Ensure we did *not* process any of the non-waiting builds.

          assert consumer.incoming.qsize() == 0

  

-     def test_cleanup_stale_failed_builds(self, create_builder, global_consumer, dbg, db_session):

+     def test_cleanup_stale_failed_builds(self, create_builder, global_consumer, dbg):

          """ Test that one of the two module builds gets to the garbage state when running

          cleanup_stale_failed_builds.

          """
@@ -513,7 +520,7 @@ 

  

          # Ensure the queue is empty before we start

          assert consumer.incoming.qsize() == 0

-         poller.cleanup_stale_failed_builds(conf, db_session)

+         poller.cleanup_stale_failed_builds(conf)

          db_session.refresh(module_build_two)

          # Make sure module_build_one was transitioned to garbage

          assert module_build_one.state == models.BUILD_STATES["garbage"]
@@ -537,7 +544,7 @@ 

          ])

  

      def test_cleanup_stale_failed_builds_no_components(

-         self, create_builder, global_consumer, dbg, db_session

+         self, create_builder, global_consumer, dbg

      ):

          """ Test that a module build without any components built gets to the garbage state when

          running cleanup_stale_failed_builds.
@@ -566,7 +573,7 @@ 

  

          # Ensure the queue is empty before we start

          assert consumer.incoming.qsize() == 0

-         poller.cleanup_stale_failed_builds(conf, db_session)

+         poller.cleanup_stale_failed_builds(conf)

          db_session.refresh(module_build_two)

          # Make sure module_build_two was transitioned to garbage

          assert module_build_two.state == models.BUILD_STATES["garbage"]
@@ -584,7 +591,7 @@ 

          "test_state", [models.BUILD_STATES[state] for state in conf.cleanup_stuck_builds_states]

      )

      def test_cancel_stuck_module_builds(

-         self, create_builder, global_consumer, dbg, test_state, db_session

+         self, create_builder, global_consumer, dbg, test_state

      ):

  

          module_build1 = models.ModuleBuild.get_by_id(db_session, 1)
@@ -612,7 +619,7 @@ 

  

          assert consumer.incoming.qsize() == 0

  

-         poller.cancel_stuck_module_builds(conf, db_session)

+         poller.cancel_stuck_module_builds(conf)

  

          module = db_session.query(models.ModuleBuild).filter_by(state=4).all()

          assert len(module) == 1
@@ -623,8 +630,7 @@ 

      @pytest.mark.parametrize("btime", (True, False))

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_sync_koji_build_tags(

-         self, ClientSession, create_builder, global_consumer, dbg, tagged, tagged_in_final, btime,

-         db_session

+         self, ClientSession, create_builder, global_consumer, dbg, tagged, tagged_in_final, btime

      ):

          module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)

          # Only module build 1's build target should be deleted.
@@ -660,7 +666,7 @@ 

  

          assert consumer.incoming.qsize() == 0

  

-         poller.sync_koji_build_tags(conf, db_session)

+         poller.sync_koji_build_tags(conf)

  

          assert consumer.incoming.qsize() == len(ret)

  
@@ -683,7 +689,7 @@ 

      @pytest.mark.parametrize("greenwave_result", [True, False])

      @patch("module_build_service.utils.greenwave.Greenwave.check_gating")

      def test_poll_greenwave(

-         self, mock_gw, create_builder, global_consumer, dbg, greenwave_result, db_session

+         self, mock_gw, create_builder, global_consumer, dbg, greenwave_result

      ):

  

          module_build1 = models.ModuleBuild.get_by_id(db_session, 1)
@@ -695,7 +701,7 @@ 

          module_build3 = models.ModuleBuild.get_by_id(db_session, 3)

          module_build3.state = models.BUILD_STATES["init"]

  

-         module_build4 = make_module_in_db("foo:1:1:1", {}, db_session=db_session)

+         module_build4 = make_module_in_db("foo:1:1:1", {})

          module_build4.state = models.BUILD_STATES["done"]

          module_build4.scratch = True

  
@@ -711,7 +717,7 @@ 

  

          mock_gw.return_value = greenwave_result

  

-         poller.poll_greenwave(conf, db_session)

+         poller.poll_greenwave(conf)

  

          mock_gw.assert_called_once()

          modules = models.ModuleBuild.by_state(db_session, "ready")

@@ -5,6 +5,7 @@ 

  import module_build_service.messaging

  import module_build_service.scheduler.handlers.repos

  import module_build_service.models

+ from module_build_service.db_session import db_session

  from module_build_service.models import ComponentBuild

  from tests import conf, scheduler_init_data

  
@@ -12,16 +13,15 @@ 

  class TestRepoDone:

  

      @mock.patch("module_build_service.models.ModuleBuild.from_repo_done_event")

-     def test_no_match(self, from_repo_done_event, db_session):

+     def test_no_match(self, from_repo_done_event):

          """ Test that when a repo msg hits us and we have no match,

          that we do nothing gracefully.

          """

-         scheduler_init_data(db_session)

+         scheduler_init_data()

          from_repo_done_event.return_value = None

          msg = module_build_service.messaging.KojiRepoChange(

              "no matches for this...", "2016-some-nonexistent-build")

-         module_build_service.scheduler.handlers.repos.done(

-             config=conf, db_session=db_session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)

  

      @mock.patch(

          "module_build_service.builder.KojiModuleBuilder."
@@ -48,18 +48,17 @@ 

          "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect"

      )

      def test_a_single_match(

-         self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea, db_session

+         self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea

      ):

          """ Test that when a repo msg hits us and we have a single match.

          """

-         scheduler_init_data(db_session)

+         scheduler_init_data()

          get_session.return_value = mock.Mock(), "development"

          build_fn.return_value = 1234, 1, "", None

  

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(

-             config=conf, db_session=db_session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)

          build_fn.assert_called_once_with(

              artifact_name="tangerine",

              source=(
@@ -94,12 +93,11 @@ 

          "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect"

      )

      def test_a_single_match_finalize(

-         self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea, finalizer,

-         db_session

+         self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea, finalizer

      ):

          """ Test that when a repo msg hits us and we have a single match.

          """

-         scheduler_init_data(db_session, tangerine_state=1)

+         scheduler_init_data(tangerine_state=1)

          get_session.return_value = mock.Mock(), "development"

          build_fn.return_value = 1234, 1, "", None

  
@@ -120,8 +118,7 @@ 

  

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(

-             config=conf, db_session=db_session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)

  

          finalizer.assert_called_once()

  
@@ -150,19 +147,18 @@ 

          "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect"

      )

      def test_a_single_match_build_fail(

-         self, connect, build_fn, config, ready, list_tasks_fn, mock_gabt, mock_uea, db_session

+         self, connect, build_fn, config, ready, list_tasks_fn, mock_gabt, mock_uea

      ):

          """ Test that when a KojiModuleBuilder.build fails, the build is

          marked as failed with proper state_reason.

          """

-         scheduler_init_data(db_session)

+         scheduler_init_data()

          config.return_value = mock.Mock(), "development"

          build_fn.return_value = None, 4, "Failed to submit artifact tangerine to Koji", None

  

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(

-             config=conf, db_session=db_session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)

          build_fn.assert_called_once_with(

              artifact_name="tangerine",

              source=(
@@ -176,11 +172,11 @@ 

          assert component_build.state_reason == "Failed to submit artifact tangerine to Koji"

  

      @mock.patch("module_build_service.scheduler.handlers.repos.log.info")

-     def test_erroneous_regen_repo_received(self, mock_log_info, db_session):

+     def test_erroneous_regen_repo_received(self, mock_log_info):

          """ Test that when an unexpected KojiRepoRegen message is received, the module doesn't

          complete or go to the next build batch.

          """

-         scheduler_init_data(db_session, 1)

+         scheduler_init_data(1)

  

          component_build = db_session.query(ComponentBuild).filter_by(package="tangerine").one()

          component_build.tagged = False
@@ -189,8 +185,7 @@ 

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

  

-         module_build_service.scheduler.handlers.repos.done(

-             config=conf, db_session=db_session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)

  

          mock_log_info.assert_called_with(

              "Ignoring repo regen, because not all components are tagged."
@@ -218,19 +213,18 @@ 

          return_value={"build": [], "srpm-build": []},

      )

      def test_failed_component_build(

-         self, dbg, connect, build_fn, config, ready, list_tasks_fn, db_session

+         self, dbg, connect, build_fn, config, ready, list_tasks_fn

      ):

          """ Test that when a KojiModuleBuilder.build fails, the build is

          marked as failed with proper state_reason.

          """

-         scheduler_init_data(db_session, 3)

+         scheduler_init_data(3)

          config.return_value = mock.Mock(), "development"

          build_fn.return_value = None, 4, "Failed to submit artifact x to Koji", None

  

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(

-             config=conf, db_session=db_session, msg=msg)

-         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+         module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)

  

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

          assert module_build.state == module_build_service.models.BUILD_STATES["failed"]

@@ -10,6 +10,7 @@ 

  import module_build_service.scheduler.handlers.tags

  import module_build_service.models

  from tests import conf

+ from module_build_service.db_session import db_session

  

  import koji

  
@@ -18,7 +19,7 @@ 

  class TestTagTagged:

  

      @mock.patch("module_build_service.models.ModuleBuild.from_tag_change_event")

-     def test_no_matching_module(self, from_tag_change_event, db_session):

+     def test_no_matching_module(self, from_tag_change_event):

          """ Test that when a tag msg hits us and we have no match,

          that we do nothing gracefully.

          """
@@ -26,9 +27,9 @@ 

          msg = module_build_service.messaging.KojiTagChange(

              "no matches for this...", "2016-some-nonexistent-build", "artifact", "artifact-1.2-1")

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

  

-     def test_no_matching_artifact(self, db_session):

+     def test_no_matching_artifact(self):

          """ Test that when a tag msg hits us and we have no match,

          that we do nothing gracefully.

          """
@@ -39,7 +40,7 @@ 

              "artifact-1.2-1",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

  

      @patch(

          "module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -47,7 +48,7 @@ 

      )

      @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

-     def test_newrepo(self, create_builder, koji_get_session, dbg, db_session):

+     def test_newrepo(self, create_builder, koji_get_session, dbg):

          """

          Test that newRepo is called in the expected times.

          """
@@ -92,7 +93,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg

+             config=conf, msg=msg

          )

          # Tag the first component to the final tag.

          msg = module_build_service.messaging.KojiTagChange(
@@ -102,7 +103,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg

+             config=conf, msg=msg

          )

  

          # newRepo should not be called, because there are still components
@@ -117,7 +118,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg

+             config=conf, msg=msg

          )

  

          # newRepo should not be called, because the component has not been
@@ -132,7 +133,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

  

          # newRepo should be called now - all components have been tagged.

          koji_session.newRepo.assert_called_once_with(
@@ -152,7 +153,7 @@ 

      @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

      def test_newrepo_still_building_components(

-         self, create_builder, koji_get_session, dbg, db_session

+         self, create_builder, koji_get_session, dbg

      ):

          """

          Test that newRepo is called in the expected times.
@@ -188,7 +189,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

          # Tag the perl-List-Compare component to final tag.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -197,7 +198,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

  

          # newRepo should not be called, because perl-List-Compare has not been

          # built yet.
@@ -209,7 +210,7 @@ 

      )

      @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

-     def test_newrepo_failed_components(self, create_builder, koji_get_session, dbg, db_session):

+     def test_newrepo_failed_components(self, create_builder, koji_get_session, dbg):

          """

          Test that newRepo is called in the expected times.

          """
@@ -258,7 +259,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg

+             config=conf, msg=msg

          )

          # Tag the perl-List-Compare component to final tag.

          msg = module_build_service.messaging.KojiTagChange(
@@ -268,7 +269,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

  

          # newRepo should be called now - all successfully built

          # components have been tagged.
@@ -289,7 +290,7 @@ 

      @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

      def test_newrepo_multiple_batches_tagged(

-         self, create_builder, koji_get_session, dbg, db_session

+         self, create_builder, koji_get_session, dbg

      ):

          """

          Test that newRepo is called just once and only when all components
@@ -334,7 +335,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

          # Tag the first component to the final tag.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -343,7 +344,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

  

          # newRepo should not be called, because there are still components

          # to tag.
@@ -357,7 +358,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

          # Tag the second component to final tag.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -366,7 +367,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

  

          # newRepo should not be called, because there are still components

          # to tag.
@@ -380,7 +381,7 @@ 

              "module-build-macros-0.1-1.module+0+b0a1d1f7",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

          # Tag the component from first batch to the buildroot.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -389,7 +390,7 @@ 

              "module-build-macros-0.1-1.module+0+b0a1d1f7",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

  

          # newRepo should be called now - all components have been tagged.

          koji_session.newRepo.assert_called_once_with(
@@ -408,7 +409,7 @@ 

      )

      @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

-     def test_newrepo_build_time_only(self, create_builder, koji_get_session, dbg, db_session):

+     def test_newrepo_build_time_only(self, create_builder, koji_get_session, dbg):

          """

          Test the component.build_time_only is respected in tag handler.

          """
@@ -461,7 +462,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

          assert not koji_session.newRepo.called

          # Tag the perl-List-Compare component to the buildroot.

          msg = module_build_service.messaging.KojiTagChange(
@@ -471,7 +472,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

          # Tag the perl-List-Compare component to final tag.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -480,7 +481,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

  

          # newRepo should be called now - all successfully built

          # components have been tagged.
@@ -510,7 +511,7 @@ 

      @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

      def test_newrepo_not_duplicated(

-         self, create_builder, koji_get_session, dbg, task_state, expect_new_repo, db_session

+         self, create_builder, koji_get_session, dbg, task_state, expect_new_repo

      ):

          """

          Test that newRepo is not called if a task is already in progress.
@@ -560,7 +561,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

          # Tag the first component to the final tag.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -569,7 +570,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

          # Tag the second component to the buildroot.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -578,7 +579,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

          # Tag the second component to the final tag.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -587,7 +588,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, db_session=db_session, msg=msg)

+             config=conf, msg=msg)

  

          # All components are tagged, newRepo should be called if there are no active tasks.

          if expect_new_repo:

@@ -13,7 +13,7 @@ 

          clean_database()

  

      @patch("module_build_service.utils.greenwave.requests")

-     def test_greenwave_query_decision(self, mock_requests, db_session):

+     def test_greenwave_query_decision(self, mock_requests):

          resp_status = 200

          resp_content = {

              "applicable_policies": ["osci_compose_modules"],
@@ -43,7 +43,7 @@ 

                  "requires": {"platform": ["el8"]},

                  "buildrequires": {"platform": ["el8"]},

              }],

-             db_session=db_session)

+         )

          got_response = greenwave.query_decision(fake_build, prod_version="xxxx-8")

  

          assert got_response == resp_content
@@ -138,7 +138,7 @@ 

  

      @pytest.mark.parametrize("policies_satisfied", (True, False))

      @patch("module_build_service.utils.greenwave.requests")

-     def test_greenwave_check_gating(self, mock_requests, policies_satisfied, db_session):

+     def test_greenwave_check_gating(self, mock_requests, policies_satisfied):

          resp_status = 200

          policies_content = {

              "policies": [
@@ -165,7 +165,7 @@ 

                  "requires": {"platform": ["el8"]},

                  "buildrequires": {"platform": ["el8"]},

              }],

-             db_session=db_session)

+         )

          result = greenwave.check_gating(fake_build)

  

          assert result == policies_satisfied

file modified
+19 -19
@@ -112,7 +112,7 @@ 

          clean_database()

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_return_empty_if_no_ursine_build_tag_is_found(self, ClientSession, db_session):

+     def test_return_empty_if_no_ursine_build_tag_is_found(self, ClientSession):

          koji_session = ClientSession.return_value

  

          # No module koji_tag in ursine content yet. This will result in empty
@@ -123,12 +123,12 @@ 

              "url": "http://example.com/repos/tag-4-build/latest/$arch/",

          }]

  

-         modulemds = ursine.get_modulemds_from_ursine_content(db_session, "tag")

+         modulemds = ursine.get_modulemds_from_ursine_content("tag")

          assert [] == modulemds

  

      @patch.object(conf, "koji_tag_prefixes", new=["module"])

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_modulemds(self, ClientSession, db_session):

+     def test_get_modulemds(self, ClientSession):

          koji_session = ClientSession.return_value

  

          # Ensure to get the build tag for further query of ursine content.
@@ -166,15 +166,15 @@ 

          mmd_name1s2020c = make_module_in_db(

              "name1:s:2020:c",

              xmd={"mbs": {"koji_tag": "module-name1-s-2020-c"}},

-             db_session=db_session)

+         )

          mmd_name2s2021c = make_module_in_db(

              "name2:s:2021:c",

              xmd={"mbs": {"koji_tag": "module-name2-s-2021-c"}},

-             db_session=db_session)

+         )

  

          koji_tag = "tag"  # It's ok to use arbitrary tag name.

          with patch.object(conf, "koji_external_repo_url_prefix", new="http://example.com/"):

-             modulemds = ursine.get_modulemds_from_ursine_content(db_session, koji_tag)

+             modulemds = ursine.get_modulemds_from_ursine_content(koji_tag)

  

          test_nsvcs = [item.get_nsvc() for item in modulemds]

          test_nsvcs.sort()
@@ -192,14 +192,14 @@ 

      @patch.object(conf, "base_module_names", new=["platform"])

      @patch.object(ursine, "find_stream_collision_modules")

      def test_nothing_changed_if_no_base_module_is_in_buildrequires(

-         self, find_stream_collision_modules, db_session

+         self, find_stream_collision_modules

      ):

          xmd = {"mbs": {"buildrequires": {"modulea": {"stream": "master"}}}}

          fake_mmd = make_module("name1:s:2020:c", xmd=xmd)

          original_xmd = fake_mmd.get_xmd()

  

          with patch.object(ursine, "log") as log:

-             ursine.handle_stream_collision_modules(db_session, fake_mmd)

+             ursine.handle_stream_collision_modules(fake_mmd)

              assert 2 == log.info.call_count

              find_stream_collision_modules.assert_not_called()

  
@@ -208,7 +208,7 @@ 

      @patch.object(conf, "base_module_names", new=["platform"])

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

      def test_mark_handled_even_if_no_modules_in_ursine_content(

-         self, get_modulemds_from_ursine_content, db_session

+         self, get_modulemds_from_ursine_content

      ):

          xmd = {

              "mbs": {
@@ -224,7 +224,7 @@ 

          get_modulemds_from_ursine_content.return_value = []

  

          with patch.object(ursine, "log") as log:

-             ursine.handle_stream_collision_modules(db_session, fake_mmd)

+             ursine.handle_stream_collision_modules(fake_mmd)

              assert 2 == log.info.call_count

  

          # Ensure stream_collision_modules is set.
@@ -237,7 +237,7 @@ 

      @patch("module_build_service.resolver.GenericResolver.create")

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_add_collision_modules(

-         self, ClientSession, resolver_create, get_modulemds_from_ursine_content, db_session

+         self, ClientSession, resolver_create, get_modulemds_from_ursine_content

      ):

          xmd = {

              "mbs": {
@@ -255,7 +255,7 @@ 

          }

          fake_mmd = make_module("name1:s:2020:c", xmd=xmd)

  

-         def mock_get_ursine_modulemds(db_session, koji_tag):

+         def mock_get_ursine_modulemds(koji_tag):

              if koji_tag == "module-rhel-8.0-build":

                  return [

                      # This is the one
@@ -304,7 +304,7 @@ 

          koji_session = ClientSession.return_value

          koji_session.listTaggedRPMS.side_effect = mock_listTaggedRPMS

  

-         ursine.handle_stream_collision_modules(db_session, fake_mmd)

+         ursine.handle_stream_collision_modules(fake_mmd)

  

          xmd = fake_mmd.get_xmd()

          buildrequires = xmd["mbs"]["buildrequires"]
@@ -326,13 +326,13 @@ 

  

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

      def test_no_modulemds_found_from_ursine_content(

-         self, get_modulemds_from_ursine_content, db_session

+         self, get_modulemds_from_ursine_content

      ):

          get_modulemds_from_ursine_content.return_value = []

-         assert not ursine.find_stream_collision_modules(db_session, {}, "koji_tag")

+         assert not ursine.find_stream_collision_modules({}, "koji_tag")

  

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

-     def test_no_collisions_found(self, get_modulemds_from_ursine_content, db_session):

+     def test_no_collisions_found(self, get_modulemds_from_ursine_content):

          xmd_mbs_buildrequires = {"modulea": {"stream": "master"}, "moduleb": {"stream": "10"}}

          get_modulemds_from_ursine_content.return_value = [

              make_module("moduler:1:1:c1"),
@@ -340,10 +340,10 @@ 

              make_module("modulet:3:1:c3"),

          ]

          assert [] == ursine.find_stream_collision_modules(

-             db_session, xmd_mbs_buildrequires, "koji_tag")

+             xmd_mbs_buildrequires, "koji_tag")

  

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

-     def test_collision_modules_are_found(self, get_modulemds_from_ursine_content, db_session):

+     def test_collision_modules_are_found(self, get_modulemds_from_ursine_content):

          xmd_mbs_buildrequires = {"modulea": {"stream": "master"}, "moduleb": {"stream": "10"}}

          fake_modules = [

              make_module("moduler:1:1:c1"),
@@ -353,5 +353,5 @@ 

          get_modulemds_from_ursine_content.return_value = fake_modules

  

          modules = ursine.find_stream_collision_modules(

-             db_session, xmd_mbs_buildrequires, "koji_tag")

+             xmd_mbs_buildrequires, "koji_tag")

          assert [fake_modules[1].get_nsvc()] == modules

file modified
+113 -136
@@ -14,6 +14,7 @@ 

  import module_build_service.scm

  from module_build_service import models, conf

  from module_build_service.errors import ProgrammingError, ValidationError, UnprocessableEntity

+ from module_build_service.utils.reuse import get_reusable_module, get_reusable_component

  from module_build_service.utils.general import load_mmd

  from module_build_service.utils.submit import format_mmd

  from tests import (
@@ -27,6 +28,7 @@ 

  import koji

  import pytest

  import module_build_service.scheduler.handlers.components

+ from module_build_service.db_session import db_session

  from module_build_service.builder.base import GenericBuilder

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  from module_build_service import Modulemd
@@ -83,7 +85,7 @@ 

      @pytest.mark.parametrize(

          "changed_component", ["perl-List-Compare", "perl-Tangerine", "tangerine", None]

      )

-     def test_get_reusable_component_different_component(self, changed_component, db_session):

+     def test_get_reusable_component_different_component(self, changed_component):

          second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

          if changed_component:

              mmd = second_module_build.mmd()
@@ -96,12 +98,9 @@ 

              db_session.add(second_module_changed_component)

              db_session.commit()

  

-         plc_rv = module_build_service.utils.get_reusable_component(

-             db_session, second_module_build, "perl-List-Compare")

-         pt_rv = module_build_service.utils.get_reusable_component(

-             db_session, second_module_build, "perl-Tangerine")

-         tangerine_rv = module_build_service.utils.get_reusable_component(

-             db_session, second_module_build, "tangerine")

+         plc_rv = get_reusable_component(second_module_build, "perl-List-Compare")

+         pt_rv = get_reusable_component(second_module_build, "perl-Tangerine")

+         tangerine_rv = get_reusable_component(second_module_build, "tangerine")

  

          if changed_component == "perl-List-Compare":

              # perl-Tangerine can be reused even though a component in its batch has changed
@@ -125,7 +124,7 @@ 

              assert pt_rv.package == "perl-Tangerine"

              assert tangerine_rv.package == "tangerine"

  

-     def test_get_reusable_component_different_rpm_macros(self, db_session):

+     def test_get_reusable_component_different_rpm_macros(self):

          second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

          mmd = second_module_build.mmd()

          buildopts = Modulemd.Buildopts()
@@ -134,18 +133,16 @@ 

          second_module_build.modulemd = mmd_to_str(mmd)

          db_session.commit()

  

-         plc_rv = module_build_service.utils.get_reusable_component(

-             db_session, second_module_build, "perl-List-Compare")

+         plc_rv = get_reusable_component(second_module_build, "perl-List-Compare")

          assert plc_rv is None

  

-         pt_rv = module_build_service.utils.get_reusable_component(

-             db_session, second_module_build, "perl-Tangerine")

+         pt_rv = get_reusable_component(second_module_build, "perl-Tangerine")

          assert pt_rv is None

  

      @pytest.mark.parametrize("set_current_arch", [True, False])

      @pytest.mark.parametrize("set_database_arch", [True, False])

      def test_get_reusable_component_different_arches(

-         self, set_database_arch, set_current_arch, db_session

+         self, set_database_arch, set_current_arch

      ):

          second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

  
@@ -167,14 +164,11 @@ 

              second_module_changed_component.module_build.modulemd = mmd_to_str(mmd)

              db_session.commit()

  

-         tangerine = module_build_service.utils.get_reusable_component(

-             db_session, second_module_build, "tangerine")

+         tangerine = get_reusable_component(second_module_build, "tangerine")

          assert bool(tangerine is None) != bool(set_current_arch == set_database_arch)

  

      @pytest.mark.parametrize("rebuild_strategy", models.ModuleBuild.rebuild_strategies.keys())

-     def test_get_reusable_component_different_buildrequires_stream(

-         self, rebuild_strategy, db_session

-     ):

+     def test_get_reusable_component_different_buildrequires_stream(self, rebuild_strategy):

          first_module_build = models.ModuleBuild.get_by_id(db_session, 2)

          first_module_build.rebuild_strategy = rebuild_strategy

          db_session.commit()
@@ -198,18 +192,15 @@ 

          second_module_build.rebuild_strategy = rebuild_strategy

          db_session.commit()

  

-         plc_rv = module_build_service.utils.get_reusable_component(

-             db_session, second_module_build, "perl-List-Compare")

-         pt_rv = module_build_service.utils.get_reusable_component(

-             db_session, second_module_build, "perl-Tangerine")

-         tangerine_rv = module_build_service.utils.get_reusable_component(

-             db_session, second_module_build, "tangerine")

+         plc_rv = get_reusable_component(second_module_build, "perl-List-Compare")

+         pt_rv = get_reusable_component(second_module_build, "perl-Tangerine")

+         tangerine_rv = get_reusable_component(second_module_build, "tangerine")

  

          assert plc_rv is None

          assert pt_rv is None

          assert tangerine_rv is None

  

-     def test_get_reusable_component_different_buildrequires(self, db_session):

+     def test_get_reusable_component_different_buildrequires(self):

          second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

          mmd = second_module_build.mmd()

          mmd.get_dependencies()[0].add_buildtime_stream("some_module", "master")
@@ -227,20 +218,17 @@ 

              xmd["mbs"]["buildrequires"])

          db_session.commit()

  

-         plc_rv = module_build_service.utils.get_reusable_component(

-             db_session, second_module_build, "perl-List-Compare")

+         plc_rv = get_reusable_component(second_module_build, "perl-List-Compare")

          assert plc_rv is None

  

-         pt_rv = module_build_service.utils.get_reusable_component(

-             db_session, second_module_build, "perl-Tangerine")

+         pt_rv = get_reusable_component(second_module_build, "perl-Tangerine")

          assert pt_rv is None

  

-         tangerine_rv = module_build_service.utils.get_reusable_component(

-             db_session, second_module_build, "tangerine")

+         tangerine_rv = get_reusable_component(second_module_build, "tangerine")

          assert tangerine_rv is None

  

      @patch("module_build_service.utils.submit.submit_module_build")

-     def test_submit_module_build_from_yaml_with_skiptests(self, mock_submit, db_session):

+     def test_submit_module_build_from_yaml_with_skiptests(self, mock_submit):

          """

          Tests local module build from a yaml file with the skiptests option

  
@@ -281,22 +269,22 @@ 

          clean_database()

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_build_arches(self, ClientSession, db_session):

+     def test_get_build_arches(self, ClientSession):

          session = ClientSession.return_value

          session.getTag.return_value = {"arches": "ppc64le"}

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

-         r = module_build_service.utils.get_build_arches(db_session, mmd, conf)

+         r = module_build_service.utils.get_build_arches(mmd, conf)

          assert r == ["ppc64le"]

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_build_arches_no_arch_set(self, ClientSession, db_session):

+     def test_get_build_arches_no_arch_set(self, ClientSession):

          """

          When no architecture is set in Koji tag, fallback to conf.arches.

          """

          session = ClientSession.return_value

          session.getTag.return_value = {"arches": ""}

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

-         r = module_build_service.utils.get_build_arches(db_session, mmd, conf)

+         r = module_build_service.utils.get_build_arches(mmd, conf)

          assert set(r) == set(conf.arches)

  

      @patch(
@@ -304,17 +292,17 @@ 

          new_callable=mock.PropertyMock,

          return_value=["testmodule"],

      )

-     def test_get_build_arches_koji_tag_arches(self, cfg, db_session):

+     def test_get_build_arches_koji_tag_arches(self, cfg):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

          xmd = mmd.get_xmd()

          xmd["mbs"]["koji_tag_arches"] = ["ppc64", "ppc64le"]

          mmd.set_xmd(xmd)

  

-         r = module_build_service.utils.get_build_arches(db_session, mmd, conf)

+         r = module_build_service.utils.get_build_arches(mmd, conf)

          assert r == ["ppc64", "ppc64le"]

  

      @patch.object(conf, "base_module_arches", new={"platform:xx": ["x86_64", "i686"]})

-     def test_get_build_arches_base_module_override(self, db_session):

+     def test_get_build_arches_base_module_override(self):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

          xmd = mmd.get_xmd()

          mbs_options = xmd["mbs"] if "mbs" in xmd.keys() else {}
@@ -322,11 +310,11 @@ 

          xmd["mbs"] = mbs_options

          mmd.set_xmd(xmd)

  

-         r = module_build_service.utils.get_build_arches(db_session, mmd, conf)

+         r = module_build_service.utils.get_build_arches(mmd, conf)

          assert r == ["x86_64", "i686"]

  

      @pytest.mark.parametrize("context", ["c1", None])

-     def test_import_mmd_contexts(self, context, db_session):

+     def test_import_mmd_contexts(self, context):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

          mmd.set_context(context)

  
@@ -344,7 +332,7 @@ 

              assert mmd_context == models.DEFAULT_MODULE_CONTEXT

              assert build.context == models.DEFAULT_MODULE_CONTEXT

  

-     def test_import_mmd_multiple_dependencies(self, db_session):

+     def test_import_mmd_multiple_dependencies(self):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

          mmd.add_dependencies(mmd.get_dependencies()[0].copy())

  
@@ -353,7 +341,7 @@ 

              module_build_service.utils.import_mmd(db_session, mmd)

              assert str(e.value) == expected_error

  

-     def test_import_mmd_no_xmd_buildrequires(self, db_session):

+     def test_import_mmd_no_xmd_buildrequires(self):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

          xmd = mmd.get_xmd()

          del xmd["mbs"]["buildrequires"]
@@ -367,7 +355,7 @@ 

              module_build_service.utils.import_mmd(db_session, mmd)

              assert str(e.value) == expected_error

  

-     def test_import_mmd_minimal_xmd_from_local_repository(self, db_session):

+     def test_import_mmd_minimal_xmd_from_local_repository(self):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

          xmd = mmd.get_xmd()

          xmd["mbs"] = {}
@@ -390,7 +378,7 @@ 

              ("f-28", "fedora-28", "The disttag_marking cannot contain a dash"),

          ),

      )

-     def test_import_mmd_base_module(self, stream, disttag_marking, error_msg, db_session):

+     def test_import_mmd_base_module(self, stream, disttag_marking, error_msg):

          clean_database(add_platform_module=False)

          mmd = load_mmd(read_staged_data("platform"))

          mmd = mmd.copy(mmd.get_module_name(), stream)
@@ -406,7 +394,7 @@ 

          else:

              module_build_service.utils.import_mmd(db_session, mmd)

  

-     def test_import_mmd_remove_dropped_virtual_streams(self, db_session):

+     def test_import_mmd_remove_dropped_virtual_streams(self):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

  

          # Add some virtual streams
@@ -429,9 +417,7 @@ 

          assert ["f28", "f29"] == sorted(item.name for item in module_build.virtual_streams)

          assert 0 == db_session.query(models.VirtualStream).filter_by(name="f30").count()

  

-     def test_import_mmd_dont_remove_dropped_virtual_streams_associated_with_other_modules(

-         self, db_session

-     ):

+     def test_import_mmd_dont_remove_dropped_virtual_streams_associated_with_other_modules(self):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

          # Add some virtual streams to this module metadata

          xmd = mmd.get_xmd()
@@ -463,7 +449,7 @@ 

          db_session.refresh(another_module_build)

          assert ["f29", "f30"] == sorted(item.name for item in another_module_build.virtual_streams)

  

-     def test_get_rpm_release_mse(self, db_session):

+     def test_get_rpm_release_mse(self):

          init_data(contexts=True)

  

          build_one = models.ModuleBuild.get_by_id(db_session, 2)
@@ -474,14 +460,14 @@ 

          release_two = module_build_service.utils.get_rpm_release(db_session, build_two)

          assert release_two == "module+2+17e35784"

  

-     def test_get_rpm_release_platform_stream(self, db_session):

-         scheduler_init_data(db_session, 1)

+     def test_get_rpm_release_platform_stream(self):

+         scheduler_init_data(1)

          build_one = models.ModuleBuild.get_by_id(db_session, 2)

          release = module_build_service.utils.get_rpm_release(db_session, build_one)

          assert release == "module+f28+2+814cfa39"

  

-     def test_get_rpm_release_platform_stream_override(self, db_session):

-         scheduler_init_data(db_session, 1)

+     def test_get_rpm_release_platform_stream_override(self):

+         scheduler_init_data(1)

  

          # Set the disttag_marking override on the platform

          platform = (
@@ -506,12 +492,12 @@ 

          new_callable=mock.PropertyMock,

          return_value=["build"],

      )

-     def test_get_rpm_release_metadata_br_stream_override(self, mock_admmn, db_session):

+     def test_get_rpm_release_metadata_br_stream_override(self, mock_admmn):

          """

          Test that when a module buildrequires a module in conf.allowed_privileged_module_names,

          and that module has the xmd.mbs.disttag_marking field set, it should influence the disttag.

          """

-         scheduler_init_data(db_session, 1)

+         scheduler_init_data(1)

          metadata_mmd = load_mmd(read_staged_data("build_metadata_module"))

          module_build_service.utils.import_mmd(db_session, metadata_mmd)

  
@@ -535,7 +521,7 @@ 

          release = module_build_service.utils.get_rpm_release(db_session, build_one)

          assert release == "module+product12+2+814cfa39"

  

-     def test_get_rpm_release_mse_scratch(self, db_session):

+     def test_get_rpm_release_mse_scratch(self):

          init_data(contexts=True, scratch=True)

  

          build_one = models.ModuleBuild.get_by_id(db_session, 2)
@@ -546,19 +532,19 @@ 

          release_two = module_build_service.utils.get_rpm_release(db_session, build_two)

          assert release_two == "scrmod+2+17e35784"

  

-     def test_get_rpm_release_platform_stream_scratch(self, db_session):

-         scheduler_init_data(db_session, 1, scratch=True)

+     def test_get_rpm_release_platform_stream_scratch(self):

+         scheduler_init_data(1, scratch=True)

          build_one = models.ModuleBuild.get_by_id(db_session, 2)

          release = module_build_service.utils.get_rpm_release(db_session, build_one)

          assert release == "scrmod+f28+2+814cfa39"

  

      @patch("module_build_service.utils.submit.get_build_arches")

-     def test_record_module_build_arches(self, get_build_arches, db_session):

+     def test_record_module_build_arches(self, get_build_arches):

          get_build_arches.return_value = ["x86_64", "i686"]

-         scheduler_init_data(db_session, 1)

+         scheduler_init_data(1)

          build = models.ModuleBuild.get_by_id(db_session, 2)

          build.arches = []

-         module_build_service.utils.record_module_build_arches(build.mmd(), build, db_session)

+         module_build_service.utils.record_module_build_arches(build.mmd(), build)

  

          arches = {arch.name for arch in build.arches}

          assert arches == set(get_build_arches.return_value)
@@ -624,7 +610,7 @@ 

          assert mmd_xmd == xmd

  

      @pytest.mark.usefixtures("reuse_shared_userspace_init_data")

-     def test_get_reusable_component_shared_userspace_ordering(self, db_session):

+     def test_get_reusable_component_shared_userspace_ordering(self):

          """

          For modules with lot of components per batch, there is big chance that

          the database will return them in different order than what we have for
@@ -633,9 +619,7 @@ 

          """

          old_module = models.ModuleBuild.get_by_id(db_session, 2)

          new_module = models.ModuleBuild.get_by_id(db_session, 3)

-         rv = module_build_service.utils.get_reusable_component(

-             db_session, new_module, "llvm", previous_module_build=old_module

-         )

+         rv = get_reusable_component(new_module, "llvm", previous_module_build=old_module)

          assert rv.package == "llvm"

  

      def test_validate_koji_tag_wrong_tag_arg_during_programming(self):
@@ -758,9 +742,7 @@ 

          validate_koji_tag_priv_mod_name(builder, "abc")

  

      @patch("module_build_service.scm.SCM")

-     def test_record_component_builds_duplicate_components(self, mocked_scm, db_session):

-         clean_database()

- 

+     def test_record_component_builds_duplicate_components(self, mocked_scm):

          # Mock for format_mmd to get components' latest ref

          mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"

          mocked_scm.return_value.get_latest.side_effect = [
@@ -799,14 +781,12 @@ 

          format_mmd(mmd, module_build.scmurl)

          with pytest.raises(UnprocessableEntity) as e:

              module_build_service.utils.record_component_builds(

-                 db_session, mmd, module_build, main_mmd=module_build.mmd())

+                 mmd, module_build, main_mmd=module_build.mmd())

  

          assert str(e.value) == error_msg

  

      @patch("module_build_service.scm.SCM")

-     def test_record_component_builds_set_weight(self, mocked_scm, db_session):

-         clean_database()

- 

+     def test_record_component_builds_set_weight(self, mocked_scm):

          # Mock for format_mmd to get components' latest ref

          mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"

          mocked_scm.return_value.get_latest.side_effect = [
@@ -837,7 +817,8 @@ 

          db_session.commit()

  

          format_mmd(mmd, module_build.scmurl)

-         module_build_service.utils.record_component_builds(db_session, mmd, module_build)

+         module_build_service.utils.record_component_builds(mmd, module_build)

+         db_session.commit()

  

          assert module_build.state == models.BUILD_STATES["init"]

          db_session.refresh(module_build)
@@ -845,8 +826,7 @@ 

              assert c.weight == 1.5

  

      @patch("module_build_service.scm.SCM")

-     def test_record_component_builds_component_exists_already(self, mocked_scm, db_session):

-         clean_database()

+     def test_record_component_builds_component_exists_already(self, mocked_scm):

          mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"

          mocked_scm.return_value.get_latest.side_effect = [

              "4ceea43add2366d8b8c5a622a2fb563b625b9abf",
@@ -882,7 +862,8 @@ 

          db_session.commit()

  

          format_mmd(mmd, module_build.scmurl)

-         module_build_service.utils.record_component_builds(db_session, mmd, module_build)

+         module_build_service.utils.record_component_builds(mmd, module_build)

+         db_session.commit()

  

          mmd = original_mmd.copy("testmodule", "master")

  
@@ -891,7 +872,7 @@ 

                  ValidationError,

                  match=r"Component build .+ of module build .+ already exists in database"):

              format_mmd(mmd, module_build.scmurl)

-             module_build_service.utils.record_component_builds(db_session, mmd, module_build)

+             module_build_service.utils.record_component_builds(mmd, module_build)

  

      @patch("module_build_service.scm.SCM")

      def test_format_mmd_arches(self, mocked_scm):
@@ -933,7 +914,7 @@ 

  

      @patch("module_build_service.scm.SCM")

      @patch("module_build_service.utils.submit.ThreadPool")

-     def test_format_mmd_update_time_modified(self, tp, mocked_scm, db_session):

+     def test_format_mmd_update_time_modified(self, tp, mocked_scm):

          init_data()

          build = models.ModuleBuild.get_by_id(db_session, 2)

  
@@ -993,14 +974,14 @@ 

          is_eol = module_build_service.utils.submit._is_eol_in_pdc("mariadb", "10.1")

          assert is_eol

  

-     def test_get_prefixed_version_f28(self, db_session):

-         scheduler_init_data(db_session, 1)

+     def test_get_prefixed_version_f28(self):

+         scheduler_init_data(1)

          build_one = models.ModuleBuild.get_by_id(db_session, 2)

          v = module_build_service.utils.submit.get_prefixed_version(build_one.mmd())

          assert v == 2820180205135154

  

-     def test_get_prefixed_version_fl701(self, db_session):

-         scheduler_init_data(db_session, 1)

+     def test_get_prefixed_version_fl701(self):

+         scheduler_init_data(1)

          build_one = models.ModuleBuild.get_by_id(db_session, 2)

          mmd = build_one.mmd()

          xmd = mmd.get_xmd()
@@ -1010,13 +991,13 @@ 

          assert v == 7000120180205135154

  

      @patch("module_build_service.utils.mse.generate_expanded_mmds")

-     def test_submit_build_new_mse_build(self, generate_expanded_mmds, db_session):

+     def test_submit_build_new_mse_build(self, generate_expanded_mmds):

          """

          Tests that finished build can be resubmitted in case the resubmitted

          build adds new MSE build (it means there are new expanded

          buildrequires).

          """

-         build = make_module_in_db("foo:stream:0:c1", db_session=db_session)

+         build = make_module_in_db("foo:stream:0:c1")

          assert build.state == models.BUILD_STATES["ready"]

  

          mmd1 = build.mmd()
@@ -1041,8 +1022,7 @@ 

          new_callable=mock.PropertyMock,

          return_value=["^private-.*"],

      )

-     def test_submit_build_scratch_build_only_branches(

-             self, cfg, generate_expanded_mmds, db_session):

+     def test_submit_build_scratch_build_only_branches(self, cfg, generate_expanded_mmds):

          """

          Tests the "scratch_build_only_branches" config option.

          """
@@ -1147,7 +1127,7 @@ 

          DummyModuleBuilder.TAGGED_COMPONENTS = []

          GenericBuilder.register_backend_class(KojiModuleBuilder)

  

-     def test_start_next_batch_build_reuse(self, default_buildroot_groups, db_session):

+     def test_start_next_batch_build_reuse(self, default_buildroot_groups):

          """

          Tests that start_next_batch_build:

             1) Increments module.batch.
@@ -1162,7 +1142,7 @@ 

  

          builder = mock.MagicMock()

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db_session, builder)

+             conf, module_build, builder)

  

          # Batch number should increase.

          assert module_build.batch == 2
@@ -1181,7 +1161,7 @@ 

          # the components just once.

          for msg in further_work:

              if type(msg) == module_build_service.messaging.KojiBuildChange:

-                 module_build_service.scheduler.handlers.components.complete(conf, db_session, msg)

+                 module_build_service.scheduler.handlers.components.complete(conf, msg)

  

          # Since we have reused all the components in the batch, there should

          # be fake KojiRepoChange message.
@@ -1192,7 +1172,7 @@ 

  

      @patch("module_build_service.utils.batches.start_build_component")

      def test_start_next_batch_build_reuse_some(

-         self, mock_sbc, default_buildroot_groups, db_session

+         self, mock_sbc, default_buildroot_groups

      ):

          """

          Tests that start_next_batch_build:
@@ -1205,6 +1185,7 @@ 

          """

          module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 1

+ 

          plc_component = models.ComponentBuild.from_component_name(

              db_session, "perl-List-Compare", 3)

          plc_component.ref = "5ceea46add2366d8b8c5a623a2fb563b625b9abd"
@@ -1213,7 +1194,7 @@ 

          builder.recover_orphaned_artifact.return_value = []

  

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db_session, builder)

+             conf, module_build, builder)

  

          # Batch number should increase.

          assert module_build.batch == 2
@@ -1241,7 +1222,7 @@ 

          return_value="all",

      )

      def test_start_next_batch_build_rebuild_strategy_all(

-         self, mock_rm, mock_sbc, default_buildroot_groups, db_session

+         self, mock_rm, mock_sbc, default_buildroot_groups

      ):

          """

          Tests that start_next_batch_build can't reuse any components in the batch because the
@@ -1254,7 +1235,7 @@ 

          builder = mock.MagicMock()

          builder.recover_orphaned_artifact.return_value = []

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db_session, builder)

+             conf, module_build, builder)

  

          # Batch number should increase.

          assert module_build.batch == 2
@@ -1263,7 +1244,7 @@ 

          # Make sure that both components in the batch were submitted

          assert len(mock_sbc.mock_calls) == 2

  

-     def test_start_build_component_failed_state(self, default_buildroot_groups, db_session):

+     def test_start_build_component_failed_state(self, default_buildroot_groups):

          """

          Tests whether exception occured while building sets the state to failed

          """
@@ -1282,7 +1263,7 @@ 

          return_value="only-changed",

      )

      def test_start_next_batch_build_rebuild_strategy_only_changed(

-         self, mock_rm, mock_sbc, default_buildroot_groups, db_session

+         self, mock_rm, mock_sbc, default_buildroot_groups

      ):

          """

          Tests that start_next_batch_build reuses all unchanged components in the batch because the
@@ -1301,7 +1282,7 @@ 

          builder = mock.MagicMock()

          builder.recover_orphaned_artifact.return_value = []

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db_session, builder)

+             conf, module_build, builder)

  

          # Batch number should increase

          assert module_build.batch == 2
@@ -1331,7 +1312,7 @@ 

  

          # Start the next build batch

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db_session, builder)

+             conf, module_build, builder)

          # Batch number should increase

          assert module_build.batch == 3

          # Verify that tangerine was reused even though perl-Tangerine was rebuilt in the previous
@@ -1345,7 +1326,7 @@ 

  

      @patch("module_build_service.utils.batches.start_build_component")

      def test_start_next_batch_build_smart_scheduling(

-         self, mock_sbc, default_buildroot_groups, db_session

+         self, mock_sbc, default_buildroot_groups

      ):

          """

          Tests that components with the longest build time will be scheduled first
@@ -1367,7 +1348,7 @@ 

          builder = mock.MagicMock()

          builder.recover_orphaned_artifact.return_value = []

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db_session, builder)

+             conf, module_build, builder)

  

          # Batch number should increase.

          assert module_build.batch == 2
@@ -1388,7 +1369,7 @@ 

          assert mock_sbc.mock_calls == expected_calls

  

      @patch("module_build_service.utils.batches.start_build_component")

-     def test_start_next_batch_continue(self, mock_sbc, default_buildroot_groups, db_session):

+     def test_start_next_batch_continue(self, mock_sbc, default_buildroot_groups):

          """

          Tests that start_next_batch_build does not start new batch when

          there are unbuilt components in the current one.
@@ -1403,7 +1384,7 @@ 

  

          builder = mock.MagicMock()

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db_session, builder)

+             conf, module_build, builder)

  

          # Batch number should not increase.

          assert module_build.batch == 2
@@ -1412,13 +1393,14 @@ 

          # No further work should be returned

          assert len(further_work) == 0

  

-     def test_start_next_batch_build_repo_building(self, default_buildroot_groups, db_session):

+     def test_start_next_batch_build_repo_building(self, default_buildroot_groups):

          """

          Test that start_next_batch_build does not start new batch when

          builder.buildroot_ready() returns False.

          """

          module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 1

+         db_session.commit()

  

          builder = mock.MagicMock()

          builder.buildroot_ready.return_value = False
@@ -1442,16 +1424,16 @@ 

      def teardown_method(self):

          clean_database()

  

-     def test_load_local_builds_name(self, conf_system, conf_resultsdir, db_session):

-         module_build_service.utils.load_local_builds(db_session, "testmodule")

+     def test_load_local_builds_name(self, conf_system, conf_resultsdir):

+         module_build_service.utils.load_local_builds("testmodule")

          local_modules = models.ModuleBuild.local_modules(db_session)

  

          assert len(local_modules) == 1

          assert local_modules[0].koji_tag.endswith(

              "/module-testmodule-master-20170816080816/results")

  

-     def test_load_local_builds_name_stream(self, conf_system, conf_resultsdir, db_session):

-         module_build_service.utils.load_local_builds(db_session, "testmodule:master")

+     def test_load_local_builds_name_stream(self, conf_system, conf_resultsdir):

+         module_build_service.utils.load_local_builds("testmodule:master")

          local_modules = models.ModuleBuild.local_modules(db_session)

  

          assert len(local_modules) == 1
@@ -1459,14 +1441,14 @@ 

              "/module-testmodule-master-20170816080816/results")

  

      def test_load_local_builds_name_stream_non_existing(

-         self, conf_system, conf_resultsdir, db_session

+         self, conf_system, conf_resultsdir

      ):

          with pytest.raises(RuntimeError):

-             module_build_service.utils.load_local_builds(db_session, "testmodule:x")

+             module_build_service.utils.load_local_builds("testmodule:x")

              models.ModuleBuild.local_modules(db_session)

  

-     def test_load_local_builds_name_stream_version(self, conf_system, conf_resultsdir, db_session):

-         module_build_service.utils.load_local_builds(db_session, "testmodule:master:20170816080815")

+     def test_load_local_builds_name_stream_version(self, conf_system, conf_resultsdir):

+         module_build_service.utils.load_local_builds("testmodule:master:20170816080815")

          local_modules = models.ModuleBuild.local_modules(db_session)

  

          assert len(local_modules) == 1
@@ -1474,21 +1456,21 @@ 

              "/module-testmodule-master-20170816080815/results")

  

      def test_load_local_builds_name_stream_version_non_existing(

-         self, conf_system, conf_resultsdir, db_session

+         self, conf_system, conf_resultsdir

      ):

          with pytest.raises(RuntimeError):

-             module_build_service.utils.load_local_builds(db_session, "testmodule:master:123")

+             module_build_service.utils.load_local_builds("testmodule:master:123")

              models.ModuleBuild.local_modules(db_session)

  

-     def test_load_local_builds_platform(self, conf_system, conf_resultsdir, db_session):

-         module_build_service.utils.load_local_builds(db_session, "platform")

+     def test_load_local_builds_platform(self, conf_system, conf_resultsdir):

+         module_build_service.utils.load_local_builds("platform")

          local_modules = models.ModuleBuild.local_modules(db_session)

  

          assert len(local_modules) == 1

          assert local_modules[0].koji_tag.endswith("/module-platform-f28-3/results")

  

-     def test_load_local_builds_platform_f28(self, conf_system, conf_resultsdir, db_session):

-         module_build_service.utils.load_local_builds(db_session, "platform:f28")

+     def test_load_local_builds_platform_f28(self, conf_system, conf_resultsdir):

+         module_build_service.utils.load_local_builds("platform:f28")

          local_modules = models.ModuleBuild.local_modules(db_session)

  

          assert len(local_modules) == 1
@@ -1502,8 +1484,8 @@ 

      def teardown_method(self):

          clean_database()

  

-     def test_import_fake_base_module(self, db_session):

-         module_build_service.utils.import_fake_base_module(db_session, "platform:foo:1:000000")

+     def test_import_fake_base_module(self):

+         module_build_service.utils.import_fake_base_module("platform:foo:1:000000")

          module_build = models.ModuleBuild.get_build_from_nsvc(

              db_session, "platform", "foo", 1, "000000")

          assert module_build
@@ -1523,7 +1505,7 @@ 

          assert set(mmd.get_profile_names()) == {"buildroot", "srpm-buildroot"}

  

      @patch("module_build_service.utils.general.open", create=True, new_callable=mock.mock_open)

-     def test_import_builds_from_local_dnf_repos(self, patched_open, db_session):

+     def test_import_builds_from_local_dnf_repos(self, patched_open):

          with patch("dnf.Base") as dnf_base:

              repo = mock.MagicMock()

              repo.repofile = "/etc/yum.repos.d/foo.repo"
@@ -1533,7 +1515,7 @@ 

              base.repos = {"reponame": repo}

              patched_open.return_value.readlines.return_value = ("FOO=bar", "PLATFORM_ID=platform:x")

  

-             module_build_service.utils.import_builds_from_local_dnf_repos(db_session)

+             module_build_service.utils.import_builds_from_local_dnf_repos()

  

              base.read_all_repos.assert_called_once()

              repo.load.assert_called_once()
@@ -1548,10 +1530,9 @@ 

                  db_session, "platform", "x", 1, "000000")

              assert module_build

  

-     def test_import_builds_from_local_dnf_repos_platform_id(self, db_session):

+     def test_import_builds_from_local_dnf_repos_platform_id(self):

          with patch("dnf.Base"):

-             module_build_service.utils.import_builds_from_local_dnf_repos(

-                 db_session, platform_id="platform:y")

+             module_build_service.utils.import_builds_from_local_dnf_repos("platform:y")

  

              module_build = models.ModuleBuild.get_build_from_nsvc(

                  db_session, "platform", "y", 1, "000000")
@@ -1561,7 +1542,7 @@ 

  @pytest.mark.usefixtures("reuse_component_init_data")

  class TestUtilsModuleReuse:

  

-     def test_get_reusable_module_when_reused_module_not_set(self, db_session):

+     def test_get_reusable_module_when_reused_module_not_set(self):

          module = db_session.query(models.ModuleBuild)\

                             .filter_by(name="testmodule")\

                             .order_by(models.ModuleBuild.id.desc())\
@@ -1571,13 +1552,12 @@ 

  

          assert not module.reused_module

  

-         reusable_module = module_build_service.utils.get_reusable_module(

-             db_session, module)

+         reusable_module = get_reusable_module(module)

  

          assert module.reused_module

          assert reusable_module.id == module.reused_module_id

  

-     def test_get_reusable_module_when_reused_module_already_set(self, db_session):

+     def test_get_reusable_module_when_reused_module_already_set(self):

          modules = db_session.query(models.ModuleBuild)\

                              .filter_by(name="testmodule")\

                              .order_by(models.ModuleBuild.id.desc())\
@@ -1591,8 +1571,7 @@ 

          assert build_module.reused_module

          assert reused_module == build_module.reused_module

  

-         reusable_module = module_build_service.utils.get_reusable_module(

-             db_session, build_module)

+         reusable_module = get_reusable_module(build_module)

  

          assert build_module.reused_module

          assert reusable_module.id == build_module.reused_module_id
@@ -1603,7 +1582,7 @@ 

          "module_build_service.config.Config.allow_only_compatible_base_modules",

          new_callable=mock.PropertyMock,

      )

-     def test_get_reusable_module_use_latest_build(self, cfg, db_session, allow_ocbm):

+     def test_get_reusable_module_use_latest_build(self, cfg, allow_ocbm):

          """

          Test that the `get_reusable_module` tries to reuse the latest module in case when

          multiple modules can be reused allow_only_compatible_base_modules is True.
@@ -1658,8 +1637,7 @@ 

                             .one()

          db_session.commit()

  

-         reusable_module = module_build_service.utils.get_reusable_module(

-             db_session, module)

+         reusable_module = get_reusable_module(module)

  

          if allow_ocbm:

              assert reusable_module.id == latest_module.id
@@ -1679,7 +1657,7 @@ 

          new_callable=mock.PropertyMock, return_value="koji"

      )

      def test_get_reusable_module_koji_resolver(

-             self, resolver, ClientSession, cfg, db_session, allow_ocbm):

+             self, resolver, ClientSession, cfg, allow_ocbm):

          """

          Test that get_reusable_module works with KojiResolver.

          """
@@ -1744,7 +1722,6 @@ 

                             .filter_by(state=models.BUILD_STATES["build"])\

                             .one()

  

-         reusable_module = module_build_service.utils.get_reusable_module(

-             db_session, module)

+         reusable_module = get_reusable_module(module)

  

          assert reusable_module.id == latest_module.id

@@ -5,6 +5,7 @@ 

  

  import module_build_service.utils

  from module_build_service import Modulemd, models

+ from module_build_service.db_session import db_session

  from module_build_service.errors import StreamAmbigous

  from tests import clean_database, make_module_in_db, init_data, read_staged_data

  
@@ -29,13 +30,13 @@ 

          nsvcs = [m.get_nsvc() for m in modules]

          return nsvcs

  

-     def _generate_default_modules(self, db_session):

+     def _generate_default_modules(self):

          """

          Generates gtk:1, gtk:2, foo:1 and foo:2 modules requiring the

          platform:f28 and platform:f29 modules.

          """

-         platform_f28 = make_module_in_db("platform:f28:0:c10", db_session=db_session)

-         platform_f29 = make_module_in_db("platform:f29:0:c11", db_session=db_session)

+         platform_f28 = make_module_in_db("platform:f28:0:c10")

+         platform_f29 = make_module_in_db("platform:f29:0:c11")

          f28_deps = [{

              "requires": {"platform": ["f28"]},

              "buildrequires": {"platform": ["f28"]},
@@ -44,18 +45,18 @@ 

              "requires": {"platform": ["f29"]},

              "buildrequires": {"platform": ["f29"]},

          }]

-         make_module_in_db("gtk:1:0:c2", f28_deps, base_module=platform_f28, db_session=db_session)

-         make_module_in_db("gtk:1:0:c3", f29_deps, base_module=platform_f29, db_session=db_session)

-         make_module_in_db("gtk:2:0:c4", f28_deps, base_module=platform_f28, db_session=db_session)

-         make_module_in_db("gtk:2:0:c5", f29_deps, base_module=platform_f29, db_session=db_session)

-         make_module_in_db("foo:1:0:c2", f28_deps, base_module=platform_f28, db_session=db_session)

-         make_module_in_db("foo:1:0:c3", f29_deps, base_module=platform_f29, db_session=db_session)

-         make_module_in_db("foo:2:0:c4", f28_deps, base_module=platform_f28, db_session=db_session)

-         make_module_in_db("foo:2:0:c5", f29_deps, base_module=platform_f29, db_session=db_session)

-         make_module_in_db("app:1:0:c6", f29_deps, base_module=platform_f29, db_session=db_session)

- 

-     def test_generate_expanded_mmds_context(self, db_session):

-         self._generate_default_modules(db_session)

+         make_module_in_db("gtk:1:0:c2", f28_deps, base_module=platform_f28)

+         make_module_in_db("gtk:1:0:c3", f29_deps, base_module=platform_f29)

+         make_module_in_db("gtk:2:0:c4", f28_deps, base_module=platform_f28)

+         make_module_in_db("gtk:2:0:c5", f29_deps, base_module=platform_f29)

+         make_module_in_db("foo:1:0:c2", f28_deps, base_module=platform_f28)

+         make_module_in_db("foo:1:0:c3", f29_deps, base_module=platform_f29)

+         make_module_in_db("foo:2:0:c4", f28_deps, base_module=platform_f28)

+         make_module_in_db("foo:2:0:c5", f29_deps, base_module=platform_f29)

+         make_module_in_db("app:1:0:c6", f29_deps, base_module=platform_f29)

+ 

+     def test_generate_expanded_mmds_context(self):

+         self._generate_default_modules()

          module_build = make_module_in_db(

              "app:1:0:c1", [{

                  "requires": {"gtk": ["1", "2"]},
@@ -64,7 +65,7 @@ 

                      "gtk": ["1", "2"]

                  },

              }],

-             db_session=db_session)

+         )

          mmds = module_build_service.utils.generate_expanded_mmds(

              db_session, module_build.mmd())

          contexts = {mmd.get_context() for mmd in mmds}
@@ -157,10 +158,10 @@ 

          ],

      )

      def test_generate_expanded_mmds_buildrequires(

-         self, module_deps, stream_ambigous, expected_xmd, expected_buildrequires, db_session

+         self, module_deps, stream_ambigous, expected_xmd, expected_buildrequires

      ):

-         self._generate_default_modules(db_session)

-         module_build = make_module_in_db("app:1:0:c1", module_deps, db_session=db_session)

+         self._generate_default_modules()

+         module_build = make_module_in_db("app:1:0:c1", module_deps)

  

          # Check that generate_expanded_mmds raises an exception if stream is ambigous

          # and also that it does not raise an exception otherwise.
@@ -250,9 +251,9 @@ 

              ),

          ],

      )

-     def test_generate_expanded_mmds_requires(self, module_deps, expected, db_session):

-         self._generate_default_modules(db_session)

-         module_build = make_module_in_db("app:1:0:c1", module_deps, db_session=db_session)

+     def test_generate_expanded_mmds_requires(self, module_deps, expected):

+         self._generate_default_modules()

+         module_build = make_module_in_db("app:1:0:c1", module_deps)

          mmds = module_build_service.utils.generate_expanded_mmds(db_session, module_build.mmd())

  

          requires_per_mmd = set()
@@ -342,55 +343,55 @@ 

              ),

          ],

      )

-     def test_get_required_modules_simple(self, module_deps, expected, db_session):

-         module_build = make_module_in_db("app:1:0:c1", module_deps, db_session=db_session)

-         self._generate_default_modules(db_session)

+     def test_get_required_modules_simple(self, module_deps, expected):

+         module_build = make_module_in_db("app:1:0:c1", module_deps)

+         self._generate_default_modules()

          nsvcs = self._get_mmds_required_by_module_recursively(module_build, db_session)

          assert set(nsvcs) == set(expected)

  

-     def _generate_default_modules_recursion(self, db_session):

+     def _generate_default_modules_recursion(self):

          """

          Generates the gtk:1 module requiring foo:1 module requiring bar:1

          and lorem:1 modules which require base:f29 module requiring

          platform:f29 module :).

          """

-         base_module = make_module_in_db("platform:f29:0:c11", db_session=db_session)

+         base_module = make_module_in_db("platform:f29:0:c11")

          make_module_in_db(

              "gtk:1:0:c2",

              [{"requires": {"foo": ["unknown"]}, "buildrequires": {}}],

-             base_module=base_module, db_session=db_session)

+             base_module=base_module)

          make_module_in_db(

              "gtk:1:1:c2",

              [{"requires": {"foo": ["1"]}, "buildrequires": {}}],

-             base_module=base_module, db_session=db_session)

+             base_module=base_module)

          make_module_in_db(

              "foo:1:0:c2",

              [{"requires": {"bar": ["unknown"]}, "buildrequires": {}}],

-             base_module=base_module, db_session=db_session)

+             base_module=base_module)

          make_module_in_db(

              "foo:1:1:c2",

              [{"requires": {"bar": ["1"], "lorem": ["1"]}, "buildrequires": {}}],

-             base_module=base_module, db_session=db_session)

+             base_module=base_module)

          make_module_in_db(

              "bar:1:0:c2",

              [{"requires": {"base": ["unknown"]}, "buildrequires": {}}],

-             base_module=base_module, db_session=db_session)

+             base_module=base_module)

          make_module_in_db(

              "bar:1:1:c2",

              [{"requires": {"base": ["f29"]}, "buildrequires": {}}],

-             base_module=base_module, db_session=db_session)

+             base_module=base_module)

          make_module_in_db(

              "lorem:1:0:c2",

              [{"requires": {"base": ["unknown"]}, "buildrequires": {}}],

-             base_module=base_module, db_session=db_session)

+             base_module=base_module)

          make_module_in_db(

              "lorem:1:1:c2",

              [{"requires": {"base": ["f29"]}, "buildrequires": {}}],

-             base_module=base_module, db_session=db_session)

+             base_module=base_module)

          make_module_in_db(

              "base:f29:0:c3",

              [{"requires": {"platform": ["f29"]}, "buildrequires": {}}],

-             base_module=base_module, db_session=db_session)

+             base_module=base_module)

  

      @pytest.mark.parametrize(

          "module_deps,expected",
@@ -418,13 +419,13 @@ 

              ),

          ],

      )

-     def test_get_required_modules_recursion(self, module_deps, expected, db_session):

-         module_build = make_module_in_db("app:1:0:c1", module_deps, db_session=db_session)

-         self._generate_default_modules_recursion(db_session)

+     def test_get_required_modules_recursion(self, module_deps, expected):

+         module_build = make_module_in_db("app:1:0:c1", module_deps)

+         self._generate_default_modules_recursion()

          nsvcs = self._get_mmds_required_by_module_recursively(module_build, db_session)

          assert set(nsvcs) == set(expected)

  

-     def _generate_default_modules_modules_multiple_stream_versions(self, db_session):

+     def _generate_default_modules_modules_multiple_stream_versions(self):

          """

          Generates the gtk:1 module requiring foo:1 module requiring bar:1

          and lorem:1 modules which require base:f29 module requiring
@@ -436,17 +437,17 @@ 

          }]

  

          f290000 = make_module_in_db(

-             "platform:f29.0.0:0:c11", db_session=db_session, virtual_streams=["f29"])

-         make_module_in_db("gtk:1:0:c2", f29_dep, base_module=f290000, db_session=db_session)

+             "platform:f29.0.0:0:c11", virtual_streams=["f29"])

+         make_module_in_db("gtk:1:0:c2", f29_dep, base_module=f290000)

  

          f290100 = make_module_in_db(

-             "platform:f29.1.0:0:c11", db_session=db_session, virtual_streams=["f29"])

-         make_module_in_db("gtk:1:1:c2", f29_dep, base_module=f290100, db_session=db_session)

-         make_module_in_db("gtk:1:2:c2", f29_dep, base_module=f290100, db_session=db_session)

+             "platform:f29.1.0:0:c11", virtual_streams=["f29"])

+         make_module_in_db("gtk:1:1:c2", f29_dep, base_module=f290100)

+         make_module_in_db("gtk:1:2:c2", f29_dep, base_module=f290100)

  

          f290200 = make_module_in_db(

-             "platform:f29.2.0:0:c11", db_session=db_session, virtual_streams=["f29"])

-         make_module_in_db("gtk:1:3:c2", f29_dep, base_module=f290200, db_session=db_session)

+             "platform:f29.2.0:0:c11", virtual_streams=["f29"])

+         make_module_in_db("gtk:1:3:c2", f29_dep, base_module=f290200)

  

      @pytest.mark.parametrize(

          "module_deps,expected",
@@ -460,13 +461,13 @@ 

              )

          ],

      )

-     def test_get_required_modules_stream_versions(self, module_deps, expected, db_session):

-         module_build = make_module_in_db("app:1:0:c1", module_deps, db_session=db_session)

-         self._generate_default_modules_modules_multiple_stream_versions(db_session)

+     def test_get_required_modules_stream_versions(self, module_deps, expected):

+         module_build = make_module_in_db("app:1:0:c1", module_deps)

+         self._generate_default_modules_modules_multiple_stream_versions()

          nsvcs = self._get_mmds_required_by_module_recursively(module_build, db_session)

          assert set(nsvcs) == set(expected)

  

-     def test__get_base_module_mmds(self, db_session):

+     def test__get_base_module_mmds(self):

          """Ensure the correct results are returned without duplicates."""

          init_data(data_size=1, multiple_stream_versions=True)

          mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2.yaml"))
@@ -490,7 +491,7 @@ 

          assert actual == expected

  

      @pytest.mark.parametrize("virtual_streams", (None, ["f29"], ["lp29"]))

-     def test__get_base_module_mmds_virtual_streams(self, virtual_streams, db_session):

+     def test__get_base_module_mmds_virtual_streams(self, virtual_streams):

          """Ensure the correct results are returned without duplicates."""

          init_data(data_size=1, multiple_stream_versions=True)

          mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2"))
@@ -502,9 +503,7 @@ 

          mmd.remove_dependencies(deps)

          mmd.add_dependencies(new_deps)

  

-         make_module_in_db(

-             "platform:lp29.1.1:12:c11",

-             db_session=db_session, virtual_streams=virtual_streams)

+         make_module_in_db("platform:lp29.1.1:12:c11", virtual_streams=virtual_streams)

  

          mmds = module_build_service.utils.mse.get_base_module_mmds(db_session, mmd)

          if virtual_streams == ["f29"]:
@@ -528,7 +527,7 @@ 

          "module_build_service.config.Config.allow_only_compatible_base_modules",

          new_callable=PropertyMock, return_value=False

      )

-     def test__get_base_module_mmds_virtual_streams_only_major_versions(self, cfg, db_session):

+     def test__get_base_module_mmds_virtual_streams_only_major_versions(self, cfg):

          """Ensure the correct results are returned without duplicates."""

          init_data(data_size=1, multiple_stream_versions=["foo28", "foo29", "foo30"])

  

@@ -556,14 +556,15 @@ 

          assert data["state_trace"][0]["state_name"] == "wait"

          assert data["state_url"], "/module-build-service/1/component-builds/3"

  

-     def test_query_component_builds_trace_is_serialized_with_none_state(self, db_session):

+     def test_query_component_builds_trace_is_serialized_with_none_state(self):

          # Beside the module builds and their component builds created already

          # in setup_method, some new component builds with None state must be

          # created for this test to ensure the extended_json works well to

          # serialize component build trace correctly.

  

-         module_build = make_module_in_db(

-             "cool-module:10:201907291454:c1", db_session=db_session)

+         from module_build_service.db_session import db_session

+ 

+         module_build = make_module_in_db("cool-module:10:201907291454:c1")

          component_release = get_rpm_release(db_session, module_build)

  

          # No state is set.

Please note that this patch does not change how the database session is used
in MBS. In the frontend, the database session is still managed by
Flask-SQLAlchemy, that is, db.session. The backend, which runs the event
handlers, has its own database session created directly from the SQLAlchemy
session API.

This patch aims to reduce the number of scoped_session objects created by
calls to the original make_db_session function. For detailed technical
information, please refer to the SQLAlchemy documentation on
Contextual/Thread-local Sessions.
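
For illustration only, the contextual-session pattern that documentation describes boils down to a module-level scoped_session roughly like the sketch below; the real module_build_service/db_session.py may differ, and conf.sqlalchemy_database_uri is an assumed configuration attribute.

    # Sketch of a shared, thread-local backend session (illustrative, not the exact file).
    from sqlalchemy import create_engine
    from sqlalchemy.orm import scoped_session, sessionmaker

    from module_build_service import conf  # conf.sqlalchemy_database_uri is assumed here

    engine = create_engine(conf.sqlalchemy_database_uri)
    db_session = scoped_session(sessionmaker(bind=engine))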

As a result, a global scoped_session is accessible from the code running
inside the backend, both from the event handlers and from the functions
called by those handlers. The library code shared by the frontend and
backend, such as the resolvers, is unchanged.
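
Concretely, backend call sites now just import the shared session, as the diff above shows. A minimal usage sketch (the build id 2 is only an example value):

    from module_build_service import models
    from module_build_service.db_session import db_session

    # Every backend call site shares the same thread-local session.
    build = models.ModuleBuild.get_by_id(db_session, 2)
    build.batch = 1
    db_session.commit()  # the caller commits explicitly; the session object itself is reused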

Similarly, db.session is now only used to recreate the database for every test.

Signed-off-by: Chenxiong Qi <cqi@redhat.com>

I'm wondering: is it possible to move back to an in-memory SQLite database for the tests after this change?

@cqi could you please delete module_build_service.models._dummy_context_mgr?

Please do remember to commit changes outside where to call this function. => Please remember to commit the changes where the function is called.

are left inside test and forget to => were made using the database session but the test didn't

to remain => to keep
in transaction => in the transaction

This line can be deleted. It's clear based on the previous two lines.

Why is this called twice?

Could you remove "But, it would be better just drop data only, not create database from scratch." and file an issue instead?

Why not just call db_session.remove() directly in make_simple_stop_condition instead? I can't think of a scenario where that approach isn't useful.

modifies comopnent build => modifies a component build
but not commit => but doesn't commit the changes.

Could you remove this newline please?

I don't understand this line. Do you mean that you are essentially recreating the session to refresh the module build since it was removed by the poller?

This line can be removed. "The database session can be removed safely." is sufficient.

You can put this on the line above.

@cqi It doesn't seem like this breaks compatibility. When the review is complete and you get approval, could we merge this into master instead of v3? This will help us test this change as part of a small release, and it will help reduce merge conflicts when we eventually merge v3 into master.

@mprahl The major reason I'd like to merge this into v3 is that it could be tested together with the other changes for the Celery migration. If we merge this PR into master, my concern is that it would become complex for us to manage the final merge from v3 to master. To reduce the conflicts, I think I can rebase the v3 branch regularly on master and force-update the v3 branch. What do you think?

rebased onto 67490b54ba7f11ec312d92e450e2c744562c117a

4 years ago

Why is this called twice?

db.session.remove() is removed.

Could you remove "But, it would be better just drop data only, not create database from scratch." and file an issue instead?

Issue FACTORY-5482 is created for this.

Why not just call db_session.remove() directly in make_simple_stop_condition instead? I can't think of a scenario where that approach isn't useful.

Is make_simple_stop_condition only used for tests and local builds?

Why was this added?

Removed the commit call.

I don't understand this line. Do you mean that you are essentially recreating the session to refresh the module build since it was removed by the poller?

The db_session.remove() in poller.poll() removes the session object. After that, when db_session is accessed again, the SQLAlchemy scoped_session transparently creates a new session object.
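
That is standard scoped_session behaviour; a minimal, MBS-independent illustration:

    from sqlalchemy.orm import scoped_session, sessionmaker

    Session = scoped_session(sessionmaker())
    first = Session()    # creates a session for the current thread/scope
    Session.remove()     # closes it and discards it from the registry
    second = Session()   # the next access transparently creates a fresh session
    assert first is not second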

All of the other comments have been addressed.

Build #542 failed (commit: 67490b54ba7f11ec312d92e450e2c744562c117a).
Rebase or make new commits to rebuild.

pretty please pagure-ci rebuild

4 years ago

Is make_simple_stop_condition only used for tests and local builds?

Yes, it is used only for tests (I think mainly test_build.py) and local builds. I'm not sure what the plan is once we move fully to Celery, but I think we wanted to remove fedmsg-hub entirely and use only Celery for local builds, so this function will eventually be removed.

rebased onto 50a2537fa479efdb3e4c433a177b9a2b847a006f

4 years ago

safe_stop_cond is removed. make_simple_stop_condition is updated to call db_session.remove() when the stop condition is True.
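
For readers skimming the thread, a rough sketch of that shape; the real make_simple_stop_condition in MBS may use a different query and signature, so treat everything below as illustrative only.

    from module_build_service import models
    from module_build_service.db_session import db_session

    def make_simple_stop_condition():
        """Return a callable the consumer uses to decide when to stop waiting."""
        def stop_condition(message):
            # Hypothetical check: stop once the newest module build reaches a final state.
            module_build = (
                db_session.query(models.ModuleBuild)
                .order_by(models.ModuleBuild.id.desc())
                .first())
            done = module_build.state in (
                models.BUILD_STATES["ready"], models.BUILD_STATES["failed"])
            if done:
                # The change described above: drop the thread-local session when finished.
                db_session.remove()
            return done
        return stop_condition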

@mprahl PTAL.

@cqi could you please delete module_build_service.models._dummy_context_mgr?

I ended up filing #1491 instead. It seems the function was already unused prior to this PR.

I find this comment confusing because it implies that a different database session is used in the poll method. In production that would be true since it runs in a different thread, but when run as a test, it is run in the same thread unless I'm missing something.

In reality, this database session was cleaned up in the poll method and you need to recreate the database session with the context of module_build before refreshing the object. It might just be easier to understand if you did

module_build = models.ModuleBuild.get_by_id(db_session, 3)

instead of

db_session.add(module_build)
# Refresh our module_build object.
db_session.refresh(module_build)

Either way, the comment needs clarification.

rebased onto e71d250

4 years ago

@mprahl The major reason I'd like to merge this into v3 is that it could be tested together with the other changes for the Celery migration. If we merge this PR into master, my concern is that it would become complex for us to manage the final merge from v3 to master. To reduce the conflicts, I think I can rebase the v3 branch regularly on master and force-update the v3 branch. What do you think?

@cqi I think you misunderstood. My suggestion is to merge this into master; then the v3 branch will be based off of master once this is merged. This will allow us to test this change in relative isolation when it is deployed, rather than having to test it together with the several other breaking changes in the v3 branch.

Also, it allows future changes in the master branch to use the new db_session and it will hopefully make rebasing the v3 branch easier.

@mprahl I tried to modify this PR to change the base branch from v3 to master, but it seems pagure.io does not support that. I'll close this PR and create another one based on the master branch. Once it is merged, I'll rebase v3 and update it.

Pull-Request has been closed by cqi

4 years ago
Changes Summary (50 files)

+3    -3    changed  module_build_service/builder/KojiContentGenerator.py
+65   -67   changed  module_build_service/builder/KojiModuleBuilder.py
+16   -16   changed  module_build_service/builder/MockModuleBuilder.py
+76         added    module_build_service/db_session.py
+47   -45   changed  module_build_service/manage.py
+1    -116  changed  module_build_service/models.py
+0    -17   changed  module_build_service/monitor.py
+12   -1    changed  module_build_service/scheduler/__init__.py
+9    -8    changed  module_build_service/scheduler/consumer.py
+2    -2    changed  module_build_service/scheduler/default_modules.py
+9    -8    changed  module_build_service/scheduler/handlers/components.py
+4    -5    changed  module_build_service/scheduler/handlers/greenwave.py
+14   -13   changed  module_build_service/scheduler/handlers/modules.py
+3    -2    changed  module_build_service/scheduler/handlers/repos.py
+2    -1    changed  module_build_service/scheduler/handlers/tags.py
+33   -30   changed  module_build_service/scheduler/producer.py
+9    -9    changed  module_build_service/utils/batches.py
+5    -7    changed  module_build_service/utils/general.py
+14   -12   changed  module_build_service/utils/reuse.py
+9    -14   changed  module_build_service/utils/submit.py
+8    -11   changed  module_build_service/utils/ursine.py
+14   -2    changed  module_build_service/views.py
+17   -13   changed  tests/__init__.py
+5    -11   changed  tests/conftest.py
+78   -65   changed  tests/test_build/test_build.py
+2    -1    changed  tests/test_builder/test_base.py
+44   -25   changed  tests/test_builder/test_koji.py
+8    -7    changed  tests/test_builder/test_mock.py
+5    -7    changed  tests/test_content_generator.py
+3    -2    changed  tests/test_logger.py
+5    -4    changed  tests/test_manage.py
+24   -34   changed  tests/test_models/test_models.py
+19   -12   changed  tests/test_monitor.py
+16   -19   changed  tests/test_resolver/test_db.py
+22   -22   changed  tests/test_resolver/test_koji.py
+3    -2    changed  tests/test_resolver/test_local.py
+15   -14   changed  tests/test_resolver/test_mbs.py
+1    -1    changed  tests/test_scheduler/test_consumer.py
+13   -12   changed  tests/test_scheduler/test_default_modules.py
+23   -20   changed  tests/test_scheduler/test_greenwave.py
+13   -14   changed  tests/test_scheduler/test_module_init.py
+18   -39   changed  tests/test_scheduler/test_module_wait.py
+39   -33   changed  tests/test_scheduler/test_poller.py
+20   -26   changed  tests/test_scheduler/test_repo_done.py
+32   -31   changed  tests/test_scheduler/test_tag_tagged.py
+4    -4    changed  tests/test_utils/test_greenwave.py
+19   -19   changed  tests/test_utils/test_ursine.py
+113  -136  changed  tests/test_utils/test_utils.py
+55   -56   changed  tests/test_utils/test_utils_mse.py
+4    -3    changed  tests/test_views/test_views.py