#1333 Separate use of database sessions
Merged 4 years ago by cqi. Opened 4 years ago by cqi.
cqi/fm-orchestrator separate-db-session  into  master

file modified
+2 -1
@@ -96,7 +96,8 @@ 

      BUILD_LOGS_NAME_FORMAT = "build-{id}.log"

      LOG_BACKEND = "console"

      LOG_LEVEL = "debug"

-     SQLALCHEMY_DATABASE_URI = environ.get("DATABASE_URI", "sqlite://")

+     SQLALCHEMY_DATABASE_URI = environ.get(

+         "DATABASE_URI", "sqlite:///{0}".format(path.join(dbdir, "mbstest.db")))

      DEBUG = True

      MESSAGING = "in_memory"

      PDC_URL = "https://pdc.fedoraproject.org/rest_api/v1"

@@ -42,7 +42,7 @@ 

  import koji

  import pungi.arch

  

- from module_build_service import conf, log, build_logs, Modulemd

+ from module_build_service import conf, log, build_logs, models, Modulemd

  from module_build_service.scm import SCM

  from module_build_service.utils import to_text_type, load_mmd, mmd_to_str

  
@@ -789,7 +789,8 @@ 

  

          log_path = os.path.join(prepdir, "build.log")

          try:

-             source = build_logs.path(self.module)

+             with models.make_db_session(conf) as db_session:

+                 source = build_logs.path(db_session, self.module)

              log.info("Moving logs from %r to %r" % (source, log_path))

              shutil.copy(source, log_path)

          except IOError as e:

@@ -161,13 +161,15 @@ 

      region = dogpile.cache.make_region().configure("dogpile.cache.memory")

  

      @module_build_service.utils.validate_koji_tag("tag_name")

-     def __init__(self, owner, module, config, tag_name, components):

+     def __init__(self, db_session, owner, module, config, tag_name, components):

          """

+         :param db_session: SQLAlchemy session object.

          :param owner: a string representing who kicked off the builds

          :param module: module_build_service.models.ModuleBuild instance.

          :param config: module_build_service.config.Config instance

          :param tag_name: name of tag for given module

          """

+         self.db_session = db_session

          self.owner = owner

          self.module_str = module.name

          self.module = module
@@ -248,7 +250,7 @@ 

          """

          # filtered_rpms will contain the NVRs of non-reusable component's RPMs

          filtered_rpms = list(set(filtered_rpms_of_dep))

-         with models.make_session(conf) as db_session:

+         with models.make_db_session(conf) as db_session:

              # Get a module build that can be reused, which will likely be the

              # build dep that is used since it relies on itself

              reusable_module = get_reusable_module(db_session, module_build)
@@ -739,7 +741,7 @@ 

              # If the build cannot be found in the tags, it may be untagged as a result

              # of some earlier inconsistent situation. Let's find the task_info

              # based on the list of untagged builds

-             release = module_build_service.utils.get_rpm_release(self.module)

+             release = module_build_service.utils.get_rpm_release(self.db_session, self.module)

              untagged = self.koji_session.untaggedBuilds(name=component_build.package)

              for untagged_build in untagged:

                  if untagged_build["release"].endswith(release):
@@ -1272,7 +1274,7 @@ 

          :param Modulemd mmd: Modulemd to get the built RPMs from.

          :return: list of NVRs

          """

-         with models.make_session(conf) as db_session:

+         with models.make_db_session(conf) as db_session:

              build = models.ModuleBuild.get_build_from_nsvc(

                  db_session,

                  mmd.get_module_name(),

@@ -97,7 +97,8 @@ 

          raise IOError("None of {} yum config files found.".format(conf.yum_config_file))

  

      @module_build_service.utils.validate_koji_tag("tag_name")

-     def __init__(self, owner, module, config, tag_name, components):

+     def __init__(self, db_session, owner, module, config, tag_name, components):

+         self.db_session = db_session

          self.module_str = module.name

          self.module = module

          self.tag_name = tag_name
@@ -582,7 +583,7 @@ 

          :param Modulemd mmd: Modulemd to get the built RPMs from.

          :return: list of NVRs

          """

-         with models.make_session(conf) as db_session:

+         with models.make_db_session(conf) as db_session:

              build = models.ModuleBuild.get_build_from_nsvc(

                  db_session,

                  mmd.get_module_name(),

@@ -37,6 +37,7 @@ 

  import module_build_service.resolver

  import module_build_service.scm

  import module_build_service.utils

+ from module_build_service.resolver import GenericResolver

  from module_build_service.utils import create_dogpile_key_generator_func

  

  
@@ -100,8 +101,9 @@ 

          GenericBuilder.backends[backend_class.backend] = backend_class

  

      @classmethod

-     def create(cls, owner, module, backend, config, **extra):

+     def create(cls, db_session, owner, module, backend, config, **extra):

          """

+         :param db_session: SQLAlchemy session object.

          :param owner: a string representing who kicked off the builds

          :param module: module_build_service.models.ModuleBuild instance.

          :param backend: a string representing backend e.g. 'koji'
@@ -111,7 +113,7 @@ 

          and are implementation-dependent.

          """

          # check if the backend is within allowed backends for the used resolver

-         resolver = module_build_service.resolver.system_resolver

+         resolver = GenericResolver.create(db_session, conf)

          if not resolver.is_builder_compatible(backend):

              raise ValueError(

                  "Builder backend '{}' is not compatible with resolver backend '{}'. Check your "
@@ -120,17 +122,17 @@ 

  

          if backend in GenericBuilder.backends:

              return GenericBuilder.backends[backend](

-                 owner=owner, module=module, config=config, **extra)

+                 db_session=db_session, owner=owner, module=module, config=config, **extra)

          else:

              raise ValueError("Builder backend='%s' not recognized" % backend)

  

      @classmethod

-     def create_from_module(cls, session, module, config, buildroot_connect=True):

+     def create_from_module(cls, db_session, module, config, buildroot_connect=True):

          """

          Creates new GenericBuilder instance based on the data from module

          and config and connects it to buildroot.

  

-         :param session: SQLAlchemy databa session.

+         :param db_session: SQLAlchemy database session.

          :param module: module_build_service.models.ModuleBuild instance.

          :param config: module_build_service.config.Config instance.

          :kwarg buildroot_connect: a boolean that determines if the builder should run
@@ -138,6 +140,7 @@ 

          """

          components = [c.package for c in module.component_builds]

          builder = GenericBuilder.create(

+             db_session,

              module.owner,

              module,

              config.system,
@@ -146,7 +149,7 @@ 

              components=components,

          )

          if buildroot_connect is True:

-             groups = GenericBuilder.default_buildroot_groups(session, module)

+             groups = GenericBuilder.default_buildroot_groups(db_session, module)

              builder.buildroot_connect(groups)

          return builder

  
@@ -305,10 +308,10 @@ 

      @classmethod

      @module_build_service.utils.retry(wait_on=(ConnectionError))

      @default_buildroot_groups_cache.cache_on_arguments()

-     def default_buildroot_groups(cls, session, module):

+     def default_buildroot_groups(cls, db_session, module):

          try:

              mmd = module.mmd()

-             resolver = module_build_service.resolver.system_resolver

+             resolver = GenericResolver.create(db_session, conf)

  

              # Resolve default buildroot groups using the MBS, but only for

              # non-local modules.
@@ -317,8 +320,9 @@ 

          except ValueError:

              reason = "Failed to gather buildroot groups from SCM."

              log.exception(reason)

-             module.transition(conf, state="failed", state_reason=reason, failure_type="user")

-             session.commit()

+             module.transition(

+                 db_session, conf, state="failed", state_reason=reason, failure_type="user")

+             db_session.commit()

              raise

          return groups
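
Taken together, the builder changes mean the caller creates one session and hands it down through create_from_module -> create -> the backend constructor, instead of each layer consulting the global system_resolver. A minimal sketch of the new call pattern, assuming GenericBuilder is importable from module_build_service.builder and using an illustrative build id:

    from module_build_service import conf, models
    from module_build_service.builder import GenericBuilder

    with models.make_db_session(conf) as db_session:
        module = models.ModuleBuild.get_by_id(db_session, 42)  # illustrative id
        # Skip connecting the buildroot in this sketch.
        builder = GenericBuilder.create_from_module(
            db_session, module, conf, buildroot_connect=False)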

  

@@ -100,21 +100,21 @@ 

          self.build_logs_name_format = build_logs_name_format

          self.level = level

  

-     def path(self, build):

+     def path(self, db_session, build):

          """

          Returns the full path to build log of module with id `build_id`.

          """

-         path = os.path.join(self.build_logs_dir, self.name(build))

+         path = os.path.join(self.build_logs_dir, self.name(db_session, build))

          return path

  

-     def name(self, build):

+     def name(self, db_session, build):

          """

          Returns the filename for a module build

          """

-         name = self.build_logs_name_format.format(**build.json())

+         name = self.build_logs_name_format.format(**build.json(db_session))

          return name

  

-     def start(self, build):

+     def start(self, db_session, build):

          """

          Starts logging build log for module with `build_id` id.

          """
@@ -125,7 +125,7 @@ 

              return

  

          # Create and add ModuleBuildFileHandler.

-         handler = ModuleBuildFileHandler(build.id, self.path(build))

+         handler = ModuleBuildFileHandler(build.id, self.path(db_session, build))

          handler.setLevel(self.level)

          handler.setFormatter(logging.Formatter(log_format, None))

          log = logging.getLogger()
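
The log helpers now take the session explicitly because name() builds the filename from build.json(db_session), which itself needs a session. A minimal sketch of the new calling convention, using an illustrative build id:

    from module_build_service import build_logs, conf, models

    with models.make_db_session(conf) as db_session:
        build = models.ModuleBuild.get_by_id(db_session, 42)  # illustrative id
        build_logs.start(db_session, build)
        log_file = build_logs.path(db_session, build)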

file modified
+61 -56
@@ -141,60 +141,63 @@ 

              raise ValueError(

                  "Please set RESOLVER to 'mbs' in your configuration for local builds.")

  

-     with app.app_context():

-         conf.set_item("system", "mock")

-         conf.set_item("base_module_repofiles", platform_repofiles)

- 

-         # Use our own local SQLite3 database.

-         confdir = os.path.abspath(os.getcwd())

-         dbdir = \

-             os.path.abspath(os.path.join(confdir, "..")) if confdir.endswith("conf") else confdir

-         dbpath = "/{0}".format(os.path.join(dbdir, ".mbs_local_build.db"))

-         dburi = "sqlite://" + dbpath

-         app.config["SQLALCHEMY_DATABASE_URI"] = dburi

-         conf.set_item("sqlalchemy_database_uri", dburi)

-         if os.path.exists(dbpath):

-             os.remove(dbpath)

- 

-         db.create_all()

+     conf.set_item("system", "mock")

+     conf.set_item("base_module_repofiles", platform_repofiles)

+ 

+     # Use our own local SQLite3 database.

+     confdir = os.path.abspath(os.getcwd())

+     dbdir = \

+         os.path.abspath(os.path.join(confdir, "..")) if confdir.endswith("conf") else confdir

+     dbpath = "/{0}".format(os.path.join(dbdir, ".mbs_local_build.db"))

+     dburi = "sqlite://" + dbpath

+     app.config["SQLALCHEMY_DATABASE_URI"] = dburi

+     conf.set_item("sqlalchemy_database_uri", dburi)

+     if os.path.exists(dbpath):

+         os.remove(dbpath)

+ 

+     db.create_all()

+ 

+     params = {}

+     params["local_build"] = True

+     params["default_streams"] = {}

+     for ns in default_streams:

+         n, s = ns.split(":")

+         params["default_streams"][n] = s

+     if srpms:

+         params["srpms"] = srpms

+ 

+     username = getpass.getuser()

+     if not yaml_file or not yaml_file.endswith(".yaml"):

+         raise IOError("Provided modulemd file is not a yaml file.")

+ 

+     yaml_file_path = os.path.abspath(yaml_file)

+ 

+     with models.make_db_session(conf) as db_session:

          if offline:

-             import_builds_from_local_dnf_repos(platform_id)

-         load_local_builds(local_build_nsvs)

- 

-         params = {}

-         params["local_build"] = True

-         params["default_streams"] = {}

-         for ns in default_streams:

-             n, s = ns.split(":")

-             params["default_streams"][n] = s

-         if srpms:

-             params["srpms"] = srpms

- 

-         username = getpass.getuser()

-         if not yaml_file or not yaml_file.endswith(".yaml"):

-             raise IOError("Provided modulemd file is not a yaml file.")

- 

-         yaml_file_path = os.path.abspath(yaml_file)

+             import_builds_from_local_dnf_repos(db_session, platform_id)

+         load_local_builds(db_session, local_build_nsvs)

+ 

          with open(yaml_file_path) as fd:

              filename = os.path.basename(yaml_file)

              handle = FileStorage(fd)

              handle.filename = filename

              try:

                  modules_list = submit_module_build_from_yaml(

-                     username, handle, params, stream=str(stream), skiptests=skiptests

+                     db_session, username, handle, params,

+                     stream=str(stream), skiptests=skiptests

                  )

              except StreamAmbigous as e:

                  logging.error(str(e))

                  logging.error("Use '-s module_name:module_stream' to choose the stream")

                  return

  

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         stop = module_build_service.scheduler.make_simple_stop_condition(db_session)

  

-         # Run the consumer until stop_condition returns True

-         module_build_service.scheduler.main([], stop)

+     # Run the consumer until stop_condition returns True

+     module_build_service.scheduler.main([], stop)

  

-         if any(module.state == models.BUILD_STATES["failed"] for module in modules_list):

-             raise RuntimeError("Module build failed")

+     if any(module.state == models.BUILD_STATES["failed"] for module in modules_list):

+         raise RuntimeError("Module build failed")

  

  

  @manager.option(
@@ -225,27 +228,29 @@ 

      if len(parts) >= 4:

          filter_by_kwargs["context"] = parts[3]

  

-     # Find module builds to retire

-     module_builds = db.session.query(models.ModuleBuild).filter_by(**filter_by_kwargs).all()

+     with models.make_db_session(conf) as db_session:

+         # Find module builds to retire

+         module_builds = db_session.query(models.ModuleBuild).filter_by(**filter_by_kwargs).all()

+ 

+         if not module_builds:

+             logging.info("No module builds found.")

+             return

  

-     if not module_builds:

-         logging.info("No module builds found.")

-         return

+         logging.info("Found %d module builds:", len(module_builds))

+         for build in module_builds:

+             logging.info("\t%s", ":".join((build.name, build.stream, build.version, build.context)))

  

-     logging.info("Found %d module builds:", len(module_builds))

-     for build in module_builds:

-         logging.info("\t%s", ":".join((build.name, build.stream, build.version, build.context)))

+         # Prompt for confirmation

+         is_confirmed = confirm or prompt_bool("Retire {} module builds?".format(len(module_builds)))

+         if not is_confirmed:

+             logging.info("Module builds were NOT retired.")

+             return

  

-     # Prompt for confirmation

-     is_confirmed = confirm or prompt_bool("Retire {} module builds?".format(len(module_builds)))

-     if not is_confirmed:

-         logging.info("Module builds were NOT retired.")

-         return

+         # Retire module builds

+         for build in module_builds:

+             build.transition(

+                 db_session, conf, models.BUILD_STATES["garbage"], "Module build retired")

  

-     # Retire module builds

-     for build in module_builds:

-         build.transition(conf, models.BUILD_STATES["garbage"], "Module build retired")

-     db.session.commit()

      logging.info("Module builds retired.")

  

  

file modified
+198 -132
@@ -35,13 +35,13 @@ 

  

  import sqlalchemy

  import kobo.rpmlib

- from flask import has_app_context

  from sqlalchemy import func, and_

  from sqlalchemy.orm import lazyload

  from sqlalchemy.orm import validates, scoped_session, sessionmaker, load_only

+ from sqlalchemy.pool import NullPool

  

  import module_build_service.messaging

- from module_build_service import db, log, get_url_for, app, conf

+ from module_build_service import db, log, get_url_for, conf

  from module_build_service.errors import UnprocessableEntity

  

  DEFAULT_MODULE_CONTEXT = "00000000"
@@ -107,57 +107,109 @@ 

      yield None

  

  

- def _setup_event_listeners(session):

+ def _setup_event_listeners(db_session):

      """

      Starts listening for events related to database session.

      """

-     if not sqlalchemy.event.contains(session, "before_commit", session_before_commit_handlers):

-         sqlalchemy.event.listen(session, "before_commit", session_before_commit_handlers)

+     if not sqlalchemy.event.contains(db_session, "before_commit", session_before_commit_handlers):

+         sqlalchemy.event.listen(db_session, "before_commit", session_before_commit_handlers)

  

      # initialize DB event listeners from the monitor module

      from module_build_service.monitor import db_hook_event_listeners

  

-     db_hook_event_listeners(session.bind.engine)

+     db_hook_event_listeners(db_session.bind.engine)

  

  

- @contextlib.contextmanager

- def make_session(conf):

-     """

-     Yields new SQLAlchemy database sesssion.

-     """

- 

-     # Do not use scoped_session in case we are using in-memory database,

-     # because we want to use the same session across all threads to be able

-     # to use the same in-memory database in tests.

-     if conf.sqlalchemy_database_uri == "sqlite://":

-         _setup_event_listeners(db.session)

-         yield db.session

-         db.session.commit()

-         return

- 

-     # Needs to be set to create app_context.

-     if not has_app_context() and ("SERVER_NAME" not in app.config or not app.config["SERVER_NAME"]):

-         app.config["SERVER_NAME"] = "localhost"

- 

-     # If there is no app_context, we have to create one before creating

-     # the session. If we would create app_context after the session (this

-     # happens in get_url_for() method), new concurrent session would be

-     # created and this would lead to "database is locked" error for SQLite.

-     with app.app_context() if not has_app_context() else _dummy_context_mgr():

+ def apply_engine_options(conf):
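
+     """Build the SQLAlchemy engine options dict from the MBS config."""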

+     options = {

+         "configuration": {"sqlalchemy.url": conf.sqlalchemy_database_uri},

+     }

+     if conf.sqlalchemy_database_uri.startswith("sqlite://"):

+         options.update({

+             # For local module builds, MBS is effectively a multi-threaded

+             # application. The command submitting a module build runs in its

+             # own thread, and the backend build workflow, implemented as a

+             # fedmsg consumer on top of fedmsg-hub, runs in separate threads.

+             # So, disable this check in order to allow access to data that

+             # was written from another thread.

+             "connect_args": {'check_same_thread': False},

+ 

+             # Both local module builds and the test suite require a

+             # file-based SQLite database, so we do not use a connection pool

+             # in these two scenarios.

+             "poolclass": NullPool,

+         })

+     else:

          # TODO - we could use ZopeTransactionExtension() here some day for

          # improved safety on the backend.

-         engine = sqlalchemy.engine_from_config({"sqlalchemy.url": conf.sqlalchemy_database_uri})

-         session = scoped_session(sessionmaker(bind=engine))()

-         _setup_event_listeners(session)

-         try:

-             yield session

-             session.commit()

-         except Exception:

-             # This is a no-op if no transaction is in progress.

-             session.rollback()

-             raise

-         finally:

-             session.close()

+         pool_options = {}

+ 

+         # Apply the SQLALCHEMY_* pool options set in the MBS config.

+         # The abbreviation sa stands for SQLAlchemy.

+         def apply_mbs_option(mbs_config_key, sa_config_key):

+             value = getattr(conf, mbs_config_key, None)

+             if value is not None:

+                 pool_options[sa_config_key] = value

+ 

+         apply_mbs_option("sqlalchemy_pool_size", "pool_size")

+         apply_mbs_option("sqlalchemy_pool_timeout", "pool_timeout")

+         apply_mbs_option("sqlalchemy_pool_recycle", "pool_recycle")

+         apply_mbs_option("sqlalchemy_max_overflow", "max_overflow")

+ 

+         options.update(pool_options)

+ 

+     return options

+ 

+ 

+ def create_sa_session(conf):

+     """Create a SQLAlchemy session object"""

+     engine_opts = apply_engine_options(conf)

+     engine = sqlalchemy.engine_from_config(**engine_opts)

+     session = scoped_session(sessionmaker(bind=engine))()

+     return session

+ 

+ 

+ @contextlib.contextmanager

+ def make_db_session(conf):

+     """Yields new SQLAlchemy database session.

+ 

+     MBS is actually a multiple threads application consisting of several

+     components. For a deployment instance, the REST API (implemented by Flask)

+     and build workflow (implemented as a fedmsg-hub consumer), which run in

+     different threads. For building a module locally, MBS runs in a similar

+     scenario, the CLI submits module build and then the build workflow starts

+     in its own thread.

+ 

+     The code of REST API uses session object managed by Flask-SQLAlchemy, and

+     other components use a plain SQLAlchemy session object created by this

+     function.

+ 

+     To support building a module both remotely and locally, this function

+     handles a session for both SQLite and PostgreSQL. For the scenario working

+     with SQLite, check_same_thread must be set to False so that queries are

+     allowed to access data created inside other threads.

+ 

+     **Note that**: MBS uses ``autocommit=False`` mode.

+     """

+     session = create_sa_session(conf)

+     _setup_event_listeners(session)

+ 

+     try:

+         # TODO: this could be rewritten so that the caller can signal that

+         #       it has already handled all transactions itself, making the

+         #       unconditional commit below unnecessary.

+         yield session

+ 

+         # Always commit whatever transaction is currently open.

+         # FIXME: could this be a performance issue on the database side?

+         session.commit()

+     except Exception:

+         # This is a no-op if no transaction is in progress.

+         session.rollback()

+         raise

+     finally:

+         session.close()

  

  

  class MBSBase(db.Model):
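
Any component running outside the Flask request cycle now obtains its session from this single context manager, which commits on success, rolls back on error, and always closes the session. A minimal usage sketch, with an illustrative build id and state reason:

    from module_build_service import conf, models

    with models.make_db_session(conf) as db_session:
        build = models.ModuleBuild.get_by_id(db_session, 42)
        build.transition(
            db_session, conf, state=models.BUILD_STATES["failed"],
            state_reason="illustrative reason")
        # No explicit commit needed; make_db_session() commits on exit.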
@@ -296,26 +348,26 @@ 

              ]

  

      @staticmethod

-     def get_by_id(session, module_build_id):

+     def get_by_id(db_session, module_build_id):

          """Find out a module build by id and return

  

-         :param session: SQLAlchemy database session object.

+         :param db_session: SQLAlchemy database session object.

          :param int module_build_id: the module build id to find out.

          :return: the found module build. None is returned if no module build

              with specified id in database.

          :rtype: :class:`ModuleBuild`

          """

-         return session.query(ModuleBuild).filter(ModuleBuild.id == module_build_id).first()

+         return db_session.query(ModuleBuild).filter(ModuleBuild.id == module_build_id).first()

  

      @staticmethod

-     def get_last_build_in_all_streams(session, name):

+     def get_last_build_in_all_streams(db_session, name):

          """

          Returns list of all latest ModuleBuilds in "ready" state for all

          streams for given module `name`.

          """

          # Prepare the subquery to find out all unique name:stream records.

          subq = (

-             session.query(

+             db_session.query(

                  func.max(ModuleBuild.id).label("maxid"),

                  func.max(sqlalchemy.cast(ModuleBuild.version, db.BigInteger)),

              )
@@ -325,14 +377,15 @@ 

          )

  

          # Use the subquery to actually return all the columns for its results.

-         query = session.query(ModuleBuild).join(subq, and_(ModuleBuild.id == subq.c.maxid))

+         query = db_session.query(ModuleBuild).join(

+             subq, and_(ModuleBuild.id == subq.c.maxid))

          return query.all()

  

      @staticmethod

-     def _get_last_builds_in_stream_query(session, name, stream, **kwargs):

+     def _get_last_builds_in_stream_query(db_session, name, stream, **kwargs):

          # Prepare the subquery to find out all unique name:stream records.

          subq = (

-             session.query(

+             db_session.query(

                  func.max(sqlalchemy.cast(ModuleBuild.version, db.BigInteger)).label("maxversion")

              )

              .filter_by(name=name, state=BUILD_STATES["ready"], stream=stream, **kwargs)
@@ -340,7 +393,7 @@ 

          )

  

          # Use the subquery to actually return all the columns for its results.

-         query = session.query(ModuleBuild).join(

+         query = db_session.query(ModuleBuild).join(

              subq,

              and_(

                  ModuleBuild.name == name,
@@ -351,11 +404,11 @@ 

          return query

  

      @staticmethod

-     def get_last_builds_in_stream(session, name, stream, virtual_streams=None, **kwargs):

+     def get_last_builds_in_stream(db_session, name, stream, virtual_streams=None, **kwargs):

          """

          Returns the latest builds in "ready" state for given name:stream.

  

-         :param session: SQLAlchemy session.

+         :param db_session: SQLAlchemy session.

          :param str name: Name of the module to search builds for.

          :param str stream: Stream of the module to search builds for.

          :param list virtual_streams: a list of the virtual streams to filter on. The filtering uses
@@ -366,56 +419,57 @@ 

          """

          # Prepare the subquery to find out all unique name:stream records.

  

-         query = ModuleBuild._get_last_builds_in_stream_query(session, name, stream, **kwargs)

-         query = ModuleBuild._add_virtual_streams_filter(session, query, virtual_streams)

+         query = ModuleBuild._get_last_builds_in_stream_query(db_session, name, stream, **kwargs)

+         query = ModuleBuild._add_virtual_streams_filter(db_session, query, virtual_streams)

          return query.all()

  

      @staticmethod

-     def get_last_build_in_stream(session, name, stream, **kwargs):

+     def get_last_build_in_stream(db_session, name, stream, **kwargs):

          """

          Returns the latest build in "ready" state for given name:stream.

  

-         :param session: SQLAlchemy session.

+         :param db_session: SQLAlchemy session.

          :param str name: Name of the module to search builds for.

          :param str stream: Stream of the module to search builds for.

          :param dict kwargs: Key/value pairs passed to SQLAlchmey filter_by method

              allowing to set additional filter for results.

          """

-         return ModuleBuild._get_last_builds_in_stream_query(session, name, stream, **kwargs).first()

+         return ModuleBuild._get_last_builds_in_stream_query(

+             db_session, name, stream, **kwargs).first()

  

      @staticmethod

-     def get_build_from_nsvc(session, name, stream, version, context, **kwargs):

+     def get_build_from_nsvc(db_session, name, stream, version, context, **kwargs):

          """

          Returns build defined by NSVC. Optional kwargs are passed to SQLAlchemy

          filter_by method.

          """

          return (

-             session.query(ModuleBuild)

+             db_session.query(ModuleBuild)

              .filter_by(name=name, stream=stream, version=str(version), context=context, **kwargs)

              .first()

          )

  

      @staticmethod

-     def get_scratch_builds_from_nsvc(session, name, stream, version, context, **kwargs):

+     def get_scratch_builds_from_nsvc(db_session, name, stream, version, context, **kwargs):

          """

          Returns all scratch builds defined by NSVC. This is done by using the supplied `context`

          as a match prefix. Optional kwargs are passed to SQLAlchemy filter_by method.

          """

          return (

-             session.query(ModuleBuild)

+             db_session.query(ModuleBuild)

              .filter_by(name=name, stream=stream, version=str(version), scratch=True, **kwargs)

              .filter(ModuleBuild.context.like(context + "%"))

              .all()

          )

  

      @staticmethod

-     def _add_stream_version_lte_filter(session, query, stream_version):

+     def _add_stream_version_lte_filter(db_session, query, stream_version):

          """

          Adds a less than or equal to filter for stream versions based on x.y.z versioning.

  

          In essence, the filter does `XX0000 <= stream_version <= XXYYZZ`

  

-         :param session: a SQLAlchemy session

+         :param db_session: a SQLAlchemy session

          :param query: a SQLAlchemy query to add the filtering to

          :param int stream_version: the stream version to filter on

          :return: the query with the added stream version filter
@@ -430,11 +484,11 @@ 

              ModuleBuild.stream_version >= min_stream_version)

  

      @staticmethod

-     def _add_virtual_streams_filter(session, query, virtual_streams):

+     def _add_virtual_streams_filter(db_session, query, virtual_streams):

          """

          Adds a filter on ModuleBuild.virtual_streams to an existing query.

  

-         :param session: a SQLAlchemy session

+         :param db_session: a SQLAlchemy session

          :param query: a SQLAlchemy query to add the filtering to

          :param list virtual_streams: a list of the virtual streams to filter on. The filtering uses

              "or" logic. When falsy, no filtering occurs.
@@ -447,7 +501,7 @@ 

          # streams. Using distinct is necessary since a module build may contain multiple virtual

          # streams that are desired.

          modules_with_virtual_streams = (

-             session.query(ModuleBuild)

+             db_session.query(ModuleBuild)

              .join(VirtualStream, ModuleBuild.virtual_streams)

              .filter(VirtualStream.name.in_(virtual_streams))

              .order_by(ModuleBuild.id)
@@ -461,7 +515,7 @@ 

  

      @staticmethod

      def get_last_builds_in_stream_version_lte(

-             session, name, stream_version=None, virtual_streams=None, states=None):

+             db_session, name, stream_version=None, virtual_streams=None, states=None):

          """

          Returns the latest builds in "ready" state for given name:stream limited by

          `stream_version`. The `stream_version` is int generated by `get_stream_version(...)`
@@ -469,7 +523,7 @@ 

          The builds returned by this method are limited by stream_version XX.YY.ZZ like this:

              "XX0000 <= build.stream_version <= XXYYZZ".

  

-         :param session: SQLAlchemy session.

+         :param db_session: SQLAlchemy session.

          :param str name: Name of the module to search builds for.

          :param int stream_version: Maximum stream_version to search builds for. When None,

              the stream_version is not limited.
@@ -478,14 +532,14 @@ 

          """

          states = states or [BUILD_STATES["ready"]]

          query = (

-             session.query(ModuleBuild)

+             db_session.query(ModuleBuild)

              .filter(ModuleBuild.name == name)

              .filter(ModuleBuild.state.in_(states))

              .order_by(sqlalchemy.cast(ModuleBuild.version, db.BigInteger).desc())

          )

  

-         query = ModuleBuild._add_stream_version_lte_filter(session, query, stream_version)

-         query = ModuleBuild._add_virtual_streams_filter(session, query, virtual_streams)

+         query = ModuleBuild._add_stream_version_lte_filter(db_session, query, stream_version)

+         query = ModuleBuild._add_virtual_streams_filter(db_session, query, virtual_streams)

  

          builds = query.all()

  
@@ -510,20 +564,20 @@ 

          return ret

  

      @staticmethod

-     def get_module_count(session, **kwargs):

+     def get_module_count(db_session, **kwargs):

          """

          Determine the number of modules that match the provided filter.

  

-         :param session: SQLAlchemy session

+         :param db_session: SQLAlchemy session

          :return: the number of modules that match the provided filter

          :rtype: int

          """

-         return session.query(func.count(ModuleBuild.id)).filter_by(**kwargs).scalar()

+         return db_session.query(func.count(ModuleBuild.id)).filter_by(**kwargs).scalar()

  

      @staticmethod

-     def get_build_by_koji_tag(session, tag):

+     def get_build_by_koji_tag(db_session, tag):

          """Get build by its koji_tag"""

-         return session.query(ModuleBuild).filter_by(koji_tag=tag).first()

+         return db_session.query(ModuleBuild).filter_by(koji_tag=tag).first()

  

      def mmd(self):

          from module_build_service.utils import load_mmd
@@ -559,9 +613,9 @@ 

          return rebuild_strategy

  

      @classmethod

-     def from_module_event(cls, session, event):

+     def from_module_event(cls, db_session, event):

          if type(event) == module_build_service.messaging.MBSModule:

-             return session.query(cls).filter(cls.id == event.module_build_id).first()

+             return db_session.query(cls).filter(cls.id == event.module_build_id).first()

          else:

              raise ValueError("%r is not a module message." % type(event).__name__)

  
@@ -628,15 +682,16 @@ 

  

          return tuple(rv)

  

-     @property

-     def siblings(self):

-         query = (

-             self.query.filter_by(

-                 name=self.name, stream=self.stream, version=self.version, scratch=self.scratch)

-             .options(load_only("id"))

-             .filter(ModuleBuild.id != self.id)

-         )

-         return [build.id for build in query.all()]

+     def siblings(self, db_session):

+         query = db_session.query(ModuleBuild).filter(

+             ModuleBuild.name == self.name,

+             ModuleBuild.stream == self.stream,

+             ModuleBuild.version == self.version,

+             ModuleBuild.scratch == self.scratch,

+             ModuleBuild.id != self.id,

+         ).options(load_only("id"))

+         siblings_ids = [build.id for build in query.all()]

+         return siblings_ids

  

      @property

      def nvr(self):
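
Since siblings is now a method rather than a property, call sites change from attribute access to an explicit call, e.g. (hypothetical build and db_session objects):

    sibling_ids = build.siblings(db_session)  # formerly: build.siblings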
@@ -653,7 +708,7 @@ 

      @classmethod

      def create(

          cls,

-         session,

+         db_session,

          conf,

          name,

          stream,
@@ -691,21 +746,23 @@ 

          module.module_builds_trace.append(mbt)

  

          # Record the base modules this module buildrequires

-         for base_module in module.get_buildrequired_base_modules(session):

+         for base_module in module.get_buildrequired_base_modules(db_session):

              module.buildrequires.append(base_module)

  

-         session.add(module)

-         session.commit()

+         db_session.add(module)

+         db_session.commit()

+ 

          if publish_msg:

              module_build_service.messaging.publish(

                  service="mbs",

                  topic="module.state.change",

-                 msg=module.json(show_tasks=False),  # Note the state is "init" here...

+                 msg=module.json(db_session, show_tasks=False),  # Note the state is "init" here...

                  conf=conf,

              )

+ 

          return module

  

-     def transition(self, conf, state, state_reason=None, failure_type="unspec"):

+     def transition(self, db_session, conf, state, state_reason=None, failure_type="unspec"):

          """Record that a build has transitioned state.

  

          The history of state transitions are recorded in model
@@ -713,6 +770,7 @@ 

          from ``build`` to ``done``, message will be sent to configured message

          bus.

  

+         :param db_session: SQLAlchemy session object.

          :param conf: MBS config object returned from function :func:`init_config`

              which contains loaded configs.

          :type conf: :class:`Config`
@@ -746,12 +804,12 @@ 

              module_build_service.messaging.publish(

                  service="mbs",

                  topic="module.state.change",

-                 msg=self.json(show_tasks=False),

+                 msg=self.json(db_session, show_tasks=False),

                  conf=conf,

              )

  

      @classmethod

-     def local_modules(cls, session, name=None, stream=None):

+     def local_modules(cls, db_session, name=None, stream=None):

          """

          Returns list of local module builds added by

          utils.load_local_builds(...). When `name` or `stream` is set,
@@ -769,7 +827,7 @@ 

              filters["name"] = name

          if stream:

              filters["stream"] = stream

-         local_modules = session.query(ModuleBuild).filter_by(**filters).all()

+         local_modules = db_session.query(ModuleBuild).filter_by(**filters).all()

          if not local_modules:

              return []

  
@@ -779,11 +837,18 @@ 

          return local_modules

  

      @classmethod

-     def by_state(cls, session, state):

-         return session.query(ModuleBuild).filter_by(state=BUILD_STATES[state]).all()

+     def by_state(cls, db_session, state):

+         """Get module builds by state

+ 

+         :param db_session: SQLAlchemy session object.

+         :param str state: state name. Refer to key names of ``models.BUILD_STATES``.

+         :return: a list of module builds in the specified state.

+         :rtype: list[:class:`ModuleBuild`]

+         """

+         return db_session.query(ModuleBuild).filter_by(state=BUILD_STATES[state]).all()

  

      @classmethod

-     def from_repo_done_event(cls, session, event):

+     def from_repo_done_event(cls, db_session, event):

          """ Find the ModuleBuilds in our database that should be in-flight...

          ... for a given koji tag.

  
@@ -794,7 +859,7 @@ 

          else:

              tag = event.repo_tag

          query = (

-             session.query(cls)

+             db_session.query(cls)

              .filter(cls.koji_tag == tag)

              .filter(cls.state == BUILD_STATES["build"])

          )
@@ -806,10 +871,10 @@ 

          return query.first()

  

      @classmethod

-     def from_tag_change_event(cls, session, event):

+     def from_tag_change_event(cls, db_session, event):

          tag = event.tag[:-6] if event.tag.endswith("-build") else event.tag

          query = (

-             session.query(cls)

+             db_session.query(cls)

              .filter(cls.koji_tag == tag)

              .filter(cls.state == BUILD_STATES["build"])

          )
@@ -836,7 +901,7 @@ 

              rv["scratch"] = self.scratch

          return rv

  

-     def json(self, show_tasks=True):

+     def json(self, db_session, show_tasks=True):

          mmd = self.mmd()

          xmd = mmd.get_xmd()

          buildrequires = xmd.get("mbs", {}).get("buildrequires", {})
@@ -848,7 +913,7 @@ 

              "rebuild_strategy": self.rebuild_strategy,

              "scmurl": self.scmurl,

              "srpms": json.loads(self.srpms or "[]"),

-             "siblings": self.siblings,

+             "siblings": self.siblings(db_session),

              "state_reason": self.state_reason,

              "time_completed": _utc_datetime_to_iso(self.time_completed),

              "time_modified": _utc_datetime_to_iso(self.time_modified),
@@ -856,10 +921,10 @@ 

              "buildrequires": buildrequires,

          })

          if show_tasks:

-             rv["tasks"] = self.tasks()

+             rv["tasks"] = self.tasks(db_session)

          return rv

  

-     def extended_json(self, show_state_url=False, api_version=1):

+     def extended_json(self, db_session, show_state_url=False, api_version=1):

          """

          :kwarg show_state_url: this will determine if `get_url_for` should be run to determine

          what the `state_url` is. This should be set to `False` when extended_json is called from
@@ -867,7 +932,7 @@ 

          SQLAlchemy sessions.

          :kwarg api_version: the API version to use when building the state URL

          """

-         rv = self.json(show_tasks=True)

+         rv = self.json(db_session, show_tasks=True)

          state_url = None

          if show_state_url:

              state_url = get_url_for("module_build", api_version=api_version, id=self.id)
@@ -886,7 +951,7 @@ 

                      "state_name": INVERSE_BUILD_STATES[record.state],

                      "reason": record.state_reason,

                  }

-                 for record in self.state_trace(self.id)

+                 for record in self.state_trace(db_session, self.id)

              ],

              "state_url": state_url,

              "stream_version": self.stream_version,
@@ -896,14 +961,14 @@ 

  

          return rv

  

-     def tasks(self):

+     def tasks(self, db_session):

          """

          :return: dictionary containing the tasks associated with the build

          """

          tasks = dict()

          if self.id and self.state != "init":

              for build in (

-                 ComponentBuild.query.filter_by(module_id=self.id)

+                 db_session.query(ComponentBuild).filter_by(module_id=self.id)

                  .options(lazyload("module_build"))

                  .all()

              ):
@@ -919,9 +984,9 @@ 

  

          return tasks

  

-     def state_trace(self, module_id):

+     def state_trace(self, db_session, module_id):

          return (

-             ModuleBuildTrace.query.filter_by(module_id=module_id)

+             db_session.query(ModuleBuildTrace).filter_by(module_id=module_id)

              .order_by(ModuleBuildTrace.state_time)

              .all()

          )
@@ -973,11 +1038,11 @@ 

  

              return result

  

-     def get_buildrequired_base_modules(self, session):

+     def get_buildrequired_base_modules(self, db_session):

          """

          Find the base modules in the modulemd's xmd section.

  

-         :param session: the SQLAlchemy database session to use to query

+         :param db_session: the SQLAlchemy database session to use to query

          :return: a list of ModuleBuild objects of the base modules that are buildrequired with the

              ordering in conf.base_module_names preserved

          :rtype: list
@@ -994,7 +1059,7 @@ 

              if not bm_dict:

                  continue

              base_module = self.get_build_from_nsvc(

-                 session, bm, bm_dict["stream"], bm_dict["version"], bm_dict["context"]

+                 db_session, bm, bm_dict["stream"], bm_dict["version"], bm_dict["context"]

              )

              if not base_module:

                  log.error(
@@ -1056,7 +1121,7 @@ 

  

      module_build = db.relationship("ModuleBuild", backref="module_builds_trace", lazy=False)

  

-     def json(self):

+     def json(self, db_session):

          retval = {

              "id": self.id,

              "module_id": self.module_id,
@@ -1113,35 +1178,36 @@ 

      weight = db.Column(db.Float, default=0)

  

      @classmethod

-     def from_component_event(cls, session, event):

+     def from_component_event(cls, db_session, event):

          if isinstance(event, module_build_service.messaging.KojiBuildChange):

              if event.module_build_id:

                  return (

-                     session.query(cls)

+                     db_session.query(cls)

                      .filter_by(task_id=event.task_id, module_id=event.module_build_id)

                      .one()

                  )

              else:

-                 return session.query(cls).filter(cls.task_id == event.task_id).first()

+                 return db_session.query(cls).filter(cls.task_id == event.task_id).first()

          else:

              raise ValueError("%r is not a koji message." % event["topic"])

  

      @classmethod

-     def from_component_name(cls, session, component_name, module_id):

-         return session.query(cls).filter_by(package=component_name, module_id=module_id).first()

+     def from_component_name(cls, db_session, component_name, module_id):

+         return db_session.query(cls).filter_by(package=component_name, module_id=module_id).first()

  

      @classmethod

-     def from_component_nvr(cls, session, nvr, module_id):

-         return session.query(cls).filter_by(nvr=nvr, module_id=module_id).first()

+     def from_component_nvr(cls, db_session, nvr, module_id):

+         return db_session.query(cls).filter_by(nvr=nvr, module_id=module_id).first()

  

-     def state_trace(self, component_id):

+     def state_trace(self, db_session, component_id):

+         # FIXME: remove argument component_id, just use self.id

          return (

-             ComponentBuildTrace.query.filter_by(component_id=component_id)

+             db_session.query(ComponentBuildTrace).filter_by(component_id=component_id)

              .order_by(ComponentBuildTrace.state_time)

              .all()

          )

  

-     def json(self):

+     def json(self, db_session):

          retval = {

              "id": self.id,

              "package": self.package,
@@ -1164,7 +1230,7 @@ 

  

          return retval

  

-     def extended_json(self, show_state_url=False, api_version=1):

+     def extended_json(self, db_session, show_state_url=False, api_version=1):

          """

          :kwarg show_state_url: this will determine if `get_url_for` should be run to determine

          what the `state_url` is. This should be set to `False` when extended_json is called from
@@ -1172,7 +1238,7 @@ 

          SQLAlchemy sessions.

          :kwarg api_version: the API version to use when building the state URL

          """

-         json = self.json()

+         json = self.json(db_session)

          state_url = None

          if show_state_url:

              state_url = get_url_for("component_build", api_version=api_version, id=self.id)
@@ -1185,7 +1251,7 @@ 

                      "state_name": INVERSE_BUILD_STATES[record.state],

                      "reason": record.state_reason,

                  }

-                 for record in self.state_trace(self.id)

+                 for record in self.state_trace(db_session, self.id)

              ],

              "state_url": state_url,

          })
@@ -1216,7 +1282,7 @@ 

          "ComponentBuild", backref="component_builds_trace", lazy=False

      )

  

-     def json(self):

+     def json(self, db_session):

          retval = {

              "id": self.id,

              "component_id": self.component_id,
@@ -1259,7 +1325,7 @@ 

  

  @sqlalchemy.event.listens_for(ModuleBuild, "before_insert")

  @sqlalchemy.event.listens_for(ModuleBuild, "before_update")

- def new_and_update_module_handler(mapper, session, target):

+ def new_and_update_module_handler(mapper, db_session, target):

      # Only modify time_modified if it wasn't explicitly set

      if not db.inspect(target).get_history("time_modified", True).has_changes():

          target.time_modified = datetime.utcnow()
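
The DB resolver follows the same pattern below: instead of opening a session per method via make_session, DBResolver now receives one session at construction and reuses it. A minimal sketch of constructing and querying a resolver under the new API, with an illustrative module name:

    from module_build_service import conf, models
    from module_build_service.resolver import GenericResolver

    with models.make_db_session(conf) as db_session:
        resolver = GenericResolver.create(db_session, conf)
        count = resolver.get_module_count(name="platform")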

@@ -39,21 +39,22 @@ 

  

      backend = "db"

  

-     def __init__(self, config):

+     def __init__(self, db_session, config):

+         self.db_session = db_session

          self.config = config

  

      def get_module(

-         self, name, stream, version, context, state=models.BUILD_STATES["ready"], strict=False

+         self, name, stream, version, context,

+         state=models.BUILD_STATES["ready"], strict=False

      ):

-         with models.make_session(self.config) as session:

-             mb = models.ModuleBuild.get_build_from_nsvc(

-                 session, name, stream, version, context, state=state)

-             if mb:

-                 return mb.extended_json()

+         mb = models.ModuleBuild.get_build_from_nsvc(

+             self.db_session, name, stream, version, context, state=state)

+         if mb:

+             return mb.extended_json(self.db_session)

  

-             if strict:

-                 raise UnprocessableEntity(

-                     "Cannot find any module builds for %s:%s" % (name, stream))

+         if strict:

+             raise UnprocessableEntity(

+                 "Cannot find any module builds for %s:%s" % (name, stream))

  

      def get_module_count(self, **kwargs):

          """
@@ -62,8 +63,7 @@ 

          :return: the number of modules that match the provided filter

          :rtype: int

          """

-         with models.make_session(self.config) as session:

-             return models.ModuleBuild.get_module_count(session, **kwargs)

+         return models.ModuleBuild.get_module_count(self.db_session, **kwargs)

  

      def get_latest_with_virtual_stream(self, name, virtual_stream):

          """
@@ -74,17 +74,17 @@ 

          :return: the module's modulemd or None

          :rtype: Modulemd.ModuleStream or None

          """

-         with models.make_session(self.config) as session:

-             query = session.query(models.ModuleBuild).filter_by(name=name)

-             query = models.ModuleBuild._add_virtual_streams_filter(session, query, [virtual_stream])

-             # Cast the version as an integer so that we get proper ordering

-             module = query.order_by(

-                 models.ModuleBuild.stream_version.desc(),

-                 sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc(),

-             ).first()

- 

-             if module:

-                 return load_mmd(module.modulemd)

+         query = self.db_session.query(models.ModuleBuild).filter_by(name=name)

+         query = models.ModuleBuild._add_virtual_streams_filter(

+             self.db_session, query, [virtual_stream])

+         # Cast the version as an integer so that we get proper ordering

+         module = query.order_by(

+             models.ModuleBuild.stream_version.desc(),

+             sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc(),

+         ).first()

+ 

+         if module:

+             return load_mmd(module.modulemd)

  

      def get_module_modulemds(self, name, stream, version=None, context=None, strict=False):

          """
@@ -105,17 +105,16 @@ 

                  return

              return [load_mmd(mmd["modulemd"])]

  

-         with models.make_session(self.config) as session:

-             if not version and not context:

-                 builds = models.ModuleBuild.get_last_builds_in_stream(session, name, stream)

-             else:

-                 raise NotImplementedError(

-                     "This combination of name/stream/version/context is not implemented")

+         if not version and not context:

+             builds = models.ModuleBuild.get_last_builds_in_stream(self.db_session, name, stream)

+         else:

+             raise NotImplementedError(

+                 "This combination of name/stream/version/context is not implemented")

  

-             if not builds and strict:

-                 raise UnprocessableEntity(

-                     "Cannot find any module builds for %s:%s" % (name, stream))

-             return [build.mmd() for build in builds]

+         if not builds and strict:

+             raise UnprocessableEntity(

+                 "Cannot find any module builds for %s:%s" % (name, stream))

+         return [build.mmd() for build in builds]

  

      def get_compatible_base_module_modulemds(

          self, name, stream, stream_version_lte, virtual_streams, states
@@ -141,21 +140,20 @@ 

              be in.

          """

          builds = []

-         with models.make_session(self.config) as session:

-             stream_version = None

-             if stream_version_lte:

-                 stream_in_xyz_format = len(str(models.ModuleBuild.get_stream_version(

-                     stream, right_pad=False))) >= 5

-                 if stream_in_xyz_format:

-                     stream_version = models.ModuleBuild.get_stream_version(stream)

-                 else:

-                     log.warning(

-                         "Cannot get compatible base modules, because stream_version_lte is used, "

-                         "but stream %r is not in x.y.z format." % stream)

-             builds = models.ModuleBuild.get_last_builds_in_stream_version_lte(

-                 session, name, stream_version, virtual_streams, states)

- 

-             return [build.mmd() for build in builds]

+         stream_version = None

+         if stream_version_lte:

+             stream_in_xyz_format = len(str(models.ModuleBuild.get_stream_version(

+                 stream, right_pad=False))) >= 5

+             if stream_in_xyz_format:

+                 stream_version = models.ModuleBuild.get_stream_version(stream)

+             else:

+                 log.warning(

+                     "Cannot get compatible base modules, because stream_version_lte is used, "

+                     "but stream %r is not in x.y.z format." % stream)

+         builds = models.ModuleBuild.get_last_builds_in_stream_version_lte(

+             self.db_session, name, stream_version, virtual_streams, states)

+ 

+         return [build.mmd() for build in builds]

  

      def get_buildrequired_modulemds(self, name, stream, base_module_nsvc):

          """
@@ -170,51 +168,50 @@ 

          :return: List of modulemd metadata.

          """

          log.debug("Looking for %s:%s buildrequiring %s", name, stream, base_module_nsvc)

-         with models.make_session(self.config) as session:

-             query = session.query(models.ModuleBuild)

-             query = query.filter_by(name=name, stream=stream, state=models.BUILD_STATES["ready"])

- 

-             module_br_alias = aliased(models.ModuleBuild, name="module_br")

-             # Shorten this table name for clarity in the query below

-             mb_to_br = models.module_builds_to_module_buildrequires

-             # The following joins get added:

-             # JOIN module_builds_to_module_buildrequires

-             #     ON module_builds_to_module_buildrequires.module_id = module_builds.id

-             # JOIN module_builds AS module_br

-             #     ON module_builds_to_module_buildrequires.module_buildrequire_id = module_br.id

-             query = query.join(mb_to_br, mb_to_br.c.module_id == models.ModuleBuild.id).join(

-                 module_br_alias, mb_to_br.c.module_buildrequire_id == module_br_alias.id)

- 

-             # Get only modules buildrequiring particular base_module_nsvc

-             n, s, v, c = base_module_nsvc.split(":")

-             query = query.filter(

-                 module_br_alias.name == n,

-                 module_br_alias.stream == s,

-                 module_br_alias.version == v,

-                 module_br_alias.context == c,

-             )

-             query = query.order_by(

-                 sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc())

-             all_builds = query.all()

- 

-             # The `all_builds` list contains builds sorted by "build.version". We need only

-             # the builds with latest version, but in all contexts.

-             builds = []

-             latest_version = None

-             for build in all_builds:

-                 if latest_version is None:

-                     latest_version = build.version

-                 if latest_version != build.version:

-                     break

-                 builds.append(build)

- 

-             mmds = [build.mmd() for build in builds]

-             nsvcs = [

-                 mmd.get_nsvc()

-                 for mmd in mmds

-             ]

-             log.debug("Found: %r", nsvcs)

-             return mmds

+         query = self.db_session.query(models.ModuleBuild)

+         query = query.filter_by(name=name, stream=stream, state=models.BUILD_STATES["ready"])

+ 

+         module_br_alias = aliased(models.ModuleBuild, name="module_br")

+         # Shorten this table name for clarity in the query below

+         mb_to_br = models.module_builds_to_module_buildrequires

+         # The following joins get added:

+         # JOIN module_builds_to_module_buildrequires

+         #     ON module_builds_to_module_buildrequires.module_id = module_builds.id

+         # JOIN module_builds AS module_br

+         #     ON module_builds_to_module_buildrequires.module_buildrequire_id = module_br.id

+         query = query.join(mb_to_br, mb_to_br.c.module_id == models.ModuleBuild.id).join(

+             module_br_alias, mb_to_br.c.module_buildrequire_id == module_br_alias.id)

+ 

+         # Get only modules buildrequiring particular base_module_nsvc

+         n, s, v, c = base_module_nsvc.split(":")

+         query = query.filter(

+             module_br_alias.name == n,

+             module_br_alias.stream == s,

+             module_br_alias.version == v,

+             module_br_alias.context == c,

+         )

+         query = query.order_by(

+             sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc())

+         all_builds = query.all()

+ 

+         # The `all_builds` list contains builds sorted by "build.version". We need only

+         # the builds with the latest version, across all contexts.

+         builds = []

+         latest_version = None

+         for build in all_builds:

+             if latest_version is None:

+                 latest_version = build.version

+             if latest_version != build.version:

+                 break

+             builds.append(build)

+ 

+         mmds = [build.mmd() for build in builds]

+         nsvcs = [

+             mmd.get_nsvc()

+             for mmd in mmds

+         ]

+         log.debug("Found: %r", nsvcs)

+         return mmds
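
The query above joins `module_builds` against itself through the association table, using `aliased()` so the same table can play two roles at once. A minimal, self-contained sketch of this pattern, with a hypothetical `Build` model and `build_to_br` table standing in for the real ones:

```python
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import aliased, sessionmaker

Base = declarative_base()

# Association table: which build buildrequires which other build.
build_to_br = sa.Table(
    "build_to_br", Base.metadata,
    sa.Column("module_id", sa.Integer, sa.ForeignKey("builds.id")),
    sa.Column("buildrequire_id", sa.Integer, sa.ForeignKey("builds.id")),
)

class Build(Base):
    __tablename__ = "builds"
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String)

engine = sa.create_engine("sqlite://")
Base.metadata.create_all(engine)
db_session = sessionmaker(bind=engine)()

platform, mymod = Build(name="platform"), Build(name="mymod")
db_session.add_all([platform, mymod])
db_session.flush()
db_session.execute(
    build_to_br.insert().values(module_id=mymod.id, buildrequire_id=platform.id))

# Alias the builds table so it can appear twice in one query: once as
# the module itself, once as its buildrequire (module_br above).
build_br = aliased(Build, name="build_br")
query = (
    db_session.query(Build)
    .join(build_to_br, build_to_br.c.module_id == Build.id)
    .join(build_br, build_to_br.c.buildrequire_id == build_br.id)
    .filter(build_br.name == "platform")
)
assert [b.name for b in query] == ["mymod"]
```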

  

      def resolve_profiles(self, mmd, keys):

          """
@@ -230,44 +227,43 @@ 

          results = {}

          for key in keys:

              results[key] = set()

-         with models.make_session(self.config) as session:

-             for module_name, module_info in mmd.get_xmd()["mbs"]["buildrequires"].items():

-                 local_modules = models.ModuleBuild.local_modules(

-                     session, module_name, module_info["stream"])

-                 if local_modules:

-                     local_module = local_modules[0]

-                     log.info("Using local module {0!r} to resolve profiles.".format(local_module))

-                     dep_mmd = local_module.mmd()

-                     for key in keys:

-                         profile = dep_mmd.get_profile(key)

-                         if profile:

-                             results[key] |= set(profile.get_rpms())

-                     continue

- 

-                 build = models.ModuleBuild.get_build_from_nsvc(

-                     session,

-                     module_name,

-                     module_info["stream"],

-                     module_info["version"],

-                     module_info["context"],

-                     state=models.BUILD_STATES["ready"],

-                 )

-                 if not build:

-                     raise UnprocessableEntity(

-                         "The module {}:{}:{}:{} was not found".format(

-                             module_name,

-                             module_info["stream"],

-                             module_info["version"],

-                             module_info["context"],

-                         )

-                     )

-                 dep_mmd = build.mmd()

- 

-                 # Take note of what rpms are in this dep's profile

+         for module_name, module_info in mmd.get_xmd()["mbs"]["buildrequires"].items():

+             local_modules = models.ModuleBuild.local_modules(

+                 self.db_session, module_name, module_info["stream"])

+             if local_modules:

+                 local_module = local_modules[0]

+                 log.info("Using local module {0!r} to resolve profiles.".format(local_module))

+                 dep_mmd = local_module.mmd()

                  for key in keys:

                      profile = dep_mmd.get_profile(key)

                      if profile:

                          results[key] |= set(profile.get_rpms())

+                 continue

+ 

+             build = models.ModuleBuild.get_build_from_nsvc(

+                 self.db_session,

+                 module_name,

+                 module_info["stream"],

+                 module_info["version"],

+                 module_info["context"],

+                 state=models.BUILD_STATES["ready"],

+             )

+             if not build:

+                 raise UnprocessableEntity(

+                     "The module {}:{}:{}:{} was not found".format(

+                         module_name,

+                         module_info["stream"],

+                         module_info["version"],

+                         module_info["context"],

+                     )

+                 )

+             dep_mmd = build.mmd()

+ 

+             # Take note of what rpms are in this dep's profile

+             for key in keys:

+                 profile = dep_mmd.get_profile(key)

+                 if profile:

+                     results[key] |= set(profile.get_rpms())

  

          # Return the union of all rpms in all profiles of the given keys

          return results
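
`resolve_profiles` simply unions, per requested profile name, the RPMs contributed by each buildrequired module. A tiny illustration with made-up profile data:

```python
# Hypothetical per-dependency profiles, keyed by profile name.
dep_profiles = [
    {"default": {"bash", "coreutils"}, "devel": {"gcc"}},
    {"default": {"bash", "glibc"}},
]

keys = ["default", "devel"]
results = {key: set() for key in keys}
for profiles in dep_profiles:
    for key in keys:
        # A missing profile contributes nothing, mirroring the
        # `if profile:` guard above.
        results[key] |= profiles.get(key, set())

assert results == {"default": {"bash", "coreutils", "glibc"}, "devel": {"gcc"}}
```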
@@ -304,53 +300,52 @@ 

              )

  

          module_tags = {}

-         with models.make_session(self.config) as session:

-             if mmd:

-                 queried_mmd = mmd

-                 nsvc = ":".join([

-                     mmd.get_module_name(),

-                     mmd.get_stream_name(),

-                     str(mmd.get_version()),

-                     mmd.get_context() or models.DEFAULT_MODULE_CONTEXT,

-                 ])

-             else:

-                 build = models.ModuleBuild.get_build_from_nsvc(

-                     session, name, stream, version, context)

-                 if not build:

-                     raise UnprocessableEntity(

-                         "The module {} was not found".format(

-                             ":".join([name, stream, version, context]))

-                     )

-                 queried_mmd = build.mmd()

-                 nsvc = ":".join([name, stream, version, context])

- 

-             xmd_mbs = queried_mmd.get_xmd().get("mbs", {})

-             if "buildrequires" not in xmd_mbs:

-                 raise RuntimeError(

-                     "The module {} did not contain its modulemd or did not have "

-                     "its xmd attribute filled out in MBS".format(nsvc)

+         if mmd:

+             queried_mmd = mmd

+             nsvc = ":".join([

+                 mmd.get_module_name(),

+                 mmd.get_stream_name(),

+                 str(mmd.get_version()),

+                 mmd.get_context() or models.DEFAULT_MODULE_CONTEXT,

+             ])

+         else:

+             build = models.ModuleBuild.get_build_from_nsvc(

+                 self.db_session, name, stream, version, context)

+             if not build:

+                 raise UnprocessableEntity(

+                     "The module {} was not found".format(

+                         ":".join([name, stream, version, context]))

                  )

+             queried_mmd = build.mmd()

+             nsvc = ":".join([name, stream, version, context])

+ 

+         xmd_mbs = queried_mmd.get_xmd().get("mbs", {})

+         if "buildrequires" not in xmd_mbs:

+             raise RuntimeError(

+                 "The module {} did not contain its modulemd or did not have "

+                 "its xmd attribute filled out in MBS".format(nsvc)

+             )

  

-             buildrequires = xmd_mbs["buildrequires"]

-             for br_name, details in buildrequires.items():

-                 build = models.ModuleBuild.get_build_from_nsvc(

-                     session,

-                     br_name,

-                     details["stream"],

-                     details["version"],

-                     details["context"],

-                     state=models.BUILD_STATES["ready"],

-                 )

-                 if not build:

-                     raise RuntimeError(

-                         "Buildrequired module %s %r does not exist in MBS db" % (br_name, details))

+         buildrequires = xmd_mbs["buildrequires"]

+         for br_name, details in buildrequires.items():

+             build = models.ModuleBuild.get_build_from_nsvc(

+                 self.db_session,

+                 br_name,

+                 details["stream"],

+                 details["version"],

+                 details["context"],

+                 state=models.BUILD_STATES["ready"],

+             )

+             if not build:

+                 raise RuntimeError(

+                     "Buildrequired module %s %r does not exist in MBS db" % (br_name, details))

  

-                 # If the buildrequire is a meta-data only module with no Koji tag set, then just

-                 # skip it

-                 if build.koji_tag is None:

-                     continue

-                 module_tags.setdefault(build.koji_tag, [])

-                 module_tags[build.koji_tag].append(build.mmd())

+             # If the buildrequire is a meta-data only module with no Koji tag set, then just

+             # skip it

+             if build.koji_tag is None:

+                 continue

+             module_tags.setdefault(build.koji_tag, [])

+             module_tags[build.koji_tag].append(build.mmd())

  

          return module_tags
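
The tail of the loop above groups the resolved buildrequires by their Koji tag, skipping metadata-only modules whose `koji_tag` is None. Illustrated with hypothetical data:

```python
# Hypothetical (koji_tag, mmd) pairs for resolved buildrequires; a None
# tag marks a metadata-only module that owns no Koji tag.
resolved = [("module-f32-build", "mmd-a"), (None, "mmd-b"), ("module-f32-build", "mmd-c")]

module_tags = {}
for koji_tag, mmd in resolved:
    if koji_tag is None:
        continue  # metadata-only modules contribute no buildroot tag
    module_tags.setdefault(koji_tag, []).append(mmd)

assert module_tags == {"module-f32-build": ["mmd-a", "mmd-c"]}
```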

  
@@ -366,70 +361,68 @@ 

          :return: a dictionary

          """

          new_requires = {}

-         with models.make_session(self.config) as session:

-             for nsvc in requires:

-                 nsvc_splitted = nsvc.split(":")

-                 if len(nsvc_splitted) == 2:

-                     module_name, module_stream = nsvc_splitted

-                     module_version = None

-                     module_context = None

-                 elif len(nsvc_splitted) == 4:

-                     module_name, module_stream, module_version, module_context = nsvc_splitted

-                 else:

-                     raise ValueError(

-                         "Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % nsvc)

- 

-                 local_modules = models.ModuleBuild.local_modules(

-                     session, module_name, module_stream)

-                 if local_modules:

-                     local_build = local_modules[0]

-                     new_requires[module_name] = {

-                         "ref": None,

-                         "stream": local_build.stream,

-                         "version": local_build.version,

-                         "context": local_build.context,

-                         "koji_tag": local_build.koji_tag,

-                     }

-                     continue

- 

-                 if module_version is None or module_context is None:

-                     build = models.ModuleBuild.get_last_build_in_stream(

-                         session, module_name, module_stream)

-                 else:

-                     build = models.ModuleBuild.get_build_from_nsvc(

-                         session, module_name, module_stream, module_version, module_context)

- 

-                 if not build:

-                     raise UnprocessableEntity("The module {} was not found".format(nsvc))

- 

-                 commit_hash = None

-                 mmd = build.mmd()

-                 mbs_xmd = mmd.get_xmd().get("mbs", {})

-                 if mbs_xmd.get("commit"):

-                     commit_hash = mbs_xmd["commit"]

-                 else:

-                     raise RuntimeError(

-                         'The module "{0}" didn\'t contain a commit hash in its xmd'.format(

-                             module_name)

-                     )

- 

-                 if not mbs_xmd.get("mse"):

-                     raise RuntimeError(

-                         'The module "{}" is not built using Module Stream Expansion. '

-                         "Please rebuild this module first".format(nsvc)

-                     )

+         for nsvc in requires:

+             nsvc_splitted = nsvc.split(":")

+             if len(nsvc_splitted) == 2:

+                 module_name, module_stream = nsvc_splitted

+                 module_version = None

+                 module_context = None

+             elif len(nsvc_splitted) == 4:

+                 module_name, module_stream, module_version, module_context = nsvc_splitted

+             else:

+                 raise ValueError(

+                     "Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % nsvc)

  

+             local_modules = models.ModuleBuild.local_modules(

+                 self.db_session, module_name, module_stream)

+             if local_modules:

+                 local_build = local_modules[0]

                  new_requires[module_name] = {

-                     "ref": commit_hash,

-                     "stream": module_stream,

-                     "version": build.version,

-                     "context": build.context,

-                     "koji_tag": build.koji_tag,

+                     "ref": None,

+                     "stream": local_build.stream,

+                     "version": local_build.version,

+                     "context": local_build.context,

+                     "koji_tag": local_build.koji_tag,

                  }

+                 continue

+ 

+             if module_version is None or module_context is None:

+                 build = models.ModuleBuild.get_last_build_in_stream(

+                     self.db_session, module_name, module_stream)

+             else:

+                 build = models.ModuleBuild.get_build_from_nsvc(

+                     self.db_session, module_name, module_stream, module_version, module_context)

+ 

+             if not build:

+                 raise UnprocessableEntity("The module {} was not found".format(nsvc))

+ 

+             commit_hash = None

+             mmd = build.mmd()

+             mbs_xmd = mmd.get_xmd().get("mbs", {})

+             if mbs_xmd.get("commit"):

+                 commit_hash = mbs_xmd["commit"]

+             else:

+                 raise RuntimeError(

+                     'The module "{0}" didn\'t contain a commit hash in its xmd'.format(

+                         module_name)

+                 )

+ 

+             if not mbs_xmd.get("mse"):

+                 raise RuntimeError(

+                     'The module "{}" is not built using Module Stream Expansion. '

+                     "Please rebuild this module first".format(nsvc)

+                 )

+ 

+             new_requires[module_name] = {

+                 "ref": commit_hash,

+                 "stream": module_stream,

+                 "version": build.version,

+                 "context": build.context,

+                 "koji_tag": build.koji_tag,

+             }

  

          return new_requires
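
The N:S / N:S:V:C parsing at the top of `resolve_requires` is easiest to see in isolation; a standalone sketch of the same splitting and validation:

```python
def split_nsvc(nsvc):
    # Mirrors the validation above: version and context are None for
    # the short N:S form.
    parts = nsvc.split(":")
    if len(parts) == 2:
        return parts[0], parts[1], None, None
    if len(parts) == 4:
        return tuple(parts)
    raise ValueError(
        "Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % nsvc)

assert split_nsvc("platform:f32") == ("platform", "f32", None, None)
assert split_nsvc("platform:f32:20200101:c0") == ("platform", "f32", "20200101", "c0")
```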

  

      def get_modulemd_by_koji_tag(self, tag):

-         with models.make_session(self.config) as session:

-             module = models.ModuleBuild.get_build_by_koji_tag(session, tag)

-             return module.mmd() if module else None

+         module = models.ModuleBuild.get_build_by_koji_tag(self.db_session, tag)

+         return module.mmd() if module else None

@@ -28,7 +28,7 @@ 

  import logging

  import kobo.rpmlib

  

- from module_build_service import db, conf

+ from module_build_service import conf

  from module_build_service import models

  from module_build_service.errors import UnprocessableEntity

  from module_build_service.resolver.base import GenericResolver
@@ -42,7 +42,8 @@ 

  

      backend = "mbs"

  

-     def __init__(self, config):

+     def __init__(self, db_session, config):

+         self.db_session = db_session

          self.mbs_prod_url = config.mbs_url

          self._generic_error = "Failed to query MBS with query %r returned HTTP status %s"

  
@@ -195,7 +196,7 @@ 

          """

          yaml = None

  

-         local_modules = models.ModuleBuild.local_modules(db.session, name, stream)

+         local_modules = models.ModuleBuild.local_modules(self.db_session, name, stream)

          if local_modules:

              return [m.mmd() for m in local_modules]

  
@@ -291,7 +292,7 @@ 

              results[key] = set()

          for module_name, module_info in mmd.get_xmd()["mbs"]["buildrequires"].items():

              local_modules = models.ModuleBuild.local_modules(

-                 db.session, module_name, module_info["stream"])

+                 self.db_session, module_name, module_info["stream"])

              if local_modules:

                  local_module = local_modules[0]

                  log.info("Using local module %r to resolve profiles.", local_module)
@@ -375,7 +376,8 @@ 

          buildrequires = queried_mmd.get_xmd()["mbs"]["buildrequires"]

          # Queue up the next tier of deps that we should look at..

          for name, details in buildrequires.items():

-             local_modules = models.ModuleBuild.local_modules(db.session, name, details["stream"])

+             local_modules = models.ModuleBuild.local_modules(

+                 self.db_session, name, details["stream"])

              if local_modules:

                  for m in local_modules:

                      # If the buildrequire is a meta-data only module with no Koji tag set, then just
@@ -426,7 +428,8 @@ 

                      "Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % nsvc)

              # Try to find out module dependency in the local module builds

              # added by utils.load_local_builds(...).

-             local_modules = models.ModuleBuild.local_modules(db.session, module_name, module_stream)

+             local_modules = models.ModuleBuild.local_modules(

+                 self.db_session, module_name, module_stream)

              if local_modules:

                  local_build = local_modules[0]

                  new_requires[module_name] = {
@@ -488,7 +491,7 @@ 

              # If the module is a base module, then import it in the database so that entries in

              # the module_builds_to_module_buildrequires table can be created later on

              if module_name in conf.base_module_names:

-                 import_mmd(db.session, mmd)

+                 import_mmd(self.db_session, mmd)

  

          return new_requires

  

@@ -23,7 +23,6 @@ 

  

  import pkg_resources

  

- from module_build_service import conf

  from module_build_service.resolver.base import GenericResolver

  

  # NOTE: if you are adding a new resolver to MBS please note that you also have to add
@@ -33,8 +32,3 @@ 

  

  if not GenericResolver.backends:

      raise ValueError("No resolver plugins are installed or available.")

- 

- # Config has the option of which resolver should be used for current MBS run.

- # Hence, create a singleton system wide resolver for use. However, resolver

- # could be created with other required arguments in concrete cases.

- system_resolver = GenericResolver.create(conf)

@@ -52,7 +52,7 @@ 

          GenericResolver.backends[backend_class.backend] = backend_class

  

      @classmethod

-     def create(cls, config, backend=None, **extra):

+     def create(cls, db_session, config, backend=None, **extra):

          """Factory method to create a resolver object

  

          :param config: MBS config object.
@@ -71,7 +71,7 @@ 

              backend = conf.resolver

  

          if backend in GenericResolver.backends:

-             return GenericResolver.backends[backend](config, **extra)

+             return GenericResolver.backends[backend](db_session, config, **extra)

          else:

              raise ValueError("Resolver backend='%s' not recognized" % backend)

  

@@ -44,7 +44,7 @@ 

      )

  

  

- def make_simple_stop_condition(session):

+ def make_simple_stop_condition(db_session):

      """ Return a simple stop_condition callable.

  

      Intended to be used with the main() function here in manage.py and tests.
@@ -58,7 +58,7 @@ 

  

          # Grab the latest module build.

          module = (

-             session.query(module_build_service.models.ModuleBuild)

+             db_session.query(module_build_service.models.ModuleBuild)

              .order_by(module_build_service.models.ModuleBuild.id.desc())

              .first()

          )

@@ -105,7 +105,7 @@ 

  

          # These are our main lookup tables for figuring out what to run in

          # response to what messaging events.

-         self.NO_OP = NO_OP = lambda config, session, msg: True

+         self.NO_OP = NO_OP = lambda config, db_session, msg: True

          self.on_build_change = {

              koji.BUILD_STATES["BUILDING"]: NO_OP,

              koji.BUILD_STATES[
@@ -165,8 +165,8 @@ 

  

          # Primary work is done here.

          try:

-             with models.make_session(conf) as session:

-                 self.process_message(session, msg)

+             with models.make_db_session(conf) as db_session:

+                 self.process_message(db_session, msg)

              monitor.messaging_rx_processed_ok_counter.inc()

          except sqlalchemy.exc.OperationalError as error:

              monitor.messaging_rx_failed_counter.inc()
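
`make_db_session` replaces `make_session` as the way handlers obtain a session scoped to a single unit of work. A minimal sketch of what such a context manager can look like, assuming the URI lives on the config object and that it commits on success and rolls back on error; the real factory in `module_build_service.models` may manage engines differently:

```python
from contextlib import contextmanager

import sqlalchemy
from sqlalchemy.orm import sessionmaker

@contextmanager
def make_db_session(conf):
    # Sketch only: assumes conf.sqlalchemy_database_uri holds the DB URI.
    engine = sqlalchemy.create_engine(conf.sqlalchemy_database_uri)
    session = sessionmaker(bind=engine)()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()

# Usage mirrors the consumer code above:
# with make_db_session(conf) as db_session:
#     self.process_message(db_session, msg)
```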
@@ -205,7 +205,7 @@ 

  

          all_fns = list(self.on_build_change.items()) + list(self.on_module_change.items())

          for key, callback in all_fns:

-             expected = ["config", "session", "msg"]

+             expected = ["config", "db_session", "msg"]

              if six.PY2:

                  argspec = inspect.getargspec(callback)[0]

              else:
@@ -214,28 +214,28 @@ 

                  raise ValueError(

                      "Callback %r, state %r has argspec %r!=%r" % (callback, key, argspec, expected))

  

-     def process_message(self, session, msg):

+     def process_message(self, db_session, msg):

          # set module build to None and populate it later

          build = None

  

          # Choose a handler for this message

          if isinstance(msg, module_build_service.messaging.KojiBuildChange):

              handler = self.on_build_change[msg.build_new_state]

-             build = models.ComponentBuild.from_component_event(session, msg)

+             build = models.ComponentBuild.from_component_event(db_session, msg)

              if build:

                  build = build.module_build

          elif type(msg) == module_build_service.messaging.KojiRepoChange:

              handler = self.on_repo_change

-             build = models.ModuleBuild.from_repo_done_event(session, msg)

+             build = models.ModuleBuild.from_repo_done_event(db_session, msg)

          elif type(msg) == module_build_service.messaging.KojiTagChange:

              handler = self.on_tag_change

-             build = models.ModuleBuild.from_tag_change_event(session, msg)

+             build = models.ModuleBuild.from_tag_change_event(db_session, msg)

          elif type(msg) == module_build_service.messaging.MBSModule:

              handler = self.on_module_change[module_build_state_from_msg(msg)]

-             build = models.ModuleBuild.from_module_event(session, msg)

+             build = models.ModuleBuild.from_module_event(db_session, msg)

          elif type(msg) == module_build_service.messaging.GreenwaveDecisionUpdate:

              handler = self.on_decision_update

-             build = greenwave.get_corresponding_module_build(session, msg.subject_identifier)

+             build = greenwave.get_corresponding_module_build(db_session, msg.subject_identifier)

          else:

              return

  
@@ -253,20 +253,21 @@ 

              log.info("Calling %s" % idx)

              further_work = []

              try:

-                 further_work = handler(conf, session, msg) or []

+                 further_work = handler(conf, db_session, msg) or []

              except Exception as e:

                  msg = "Could not process message handler. See the traceback."

                  log.exception(msg)

-                 session.rollback()

+                 db_session.rollback()

                  if build:

-                     session.refresh(build)

+                     db_session.refresh(build)

                      build.transition(

+                         db_session,

                          conf,

                          state=models.BUILD_STATES["failed"],

                          state_reason=str(e),

                          failure_type="infra",

                      )

-                     session.commit()

+                     db_session.commit()

  

              log.debug("Done with %s" % idx)

  

@@ -30,8 +30,8 @@ 

      koji_retrying_multicall_map, KojiModuleBuilder,

  )

  from module_build_service.errors import UnprocessableEntity

+ from module_build_service.resolver.base import GenericResolver

  from module_build_service.utils.request_utils import requests_session

- from module_build_service.resolver import system_resolver as resolver

  

  

  def add_default_modules(db_session, mmd, arches):
@@ -130,6 +130,7 @@ 

                  # Only one default module is processed at a time in resolve_requires so that we

                  # are aware of which modules are not in the database, and can add those that are as

                  # buildrequires.

+                 resolver = GenericResolver.create(db_session, conf)

                  resolved = resolver.resolve_requires([default_module])

              except UnprocessableEntity:

                  log.warning(

@@ -34,11 +34,11 @@ 

  logging.basicConfig(level=logging.DEBUG)

  

  

- def _finalize(config, session, msg, state):

+ def _finalize(config, db_session, msg, state):

      """ Called whenever a koji build completes or fails. """

  

      # First, find our ModuleBuild associated with this component, if any.

-     component_build = models.ComponentBuild.from_component_event(session, msg)

+     component_build = models.ComponentBuild.from_component_event(db_session, msg)

      try:

          nvr = "{}-{}-{}".format(msg.build_name, msg.build_version, msg.build_release)

      except KeyError:
@@ -61,19 +61,20 @@ 

      component_build.state = state

      component_build.nvr = nvr

      component_build.state_reason = state_reason

-     session.commit()

+     db_session.commit()

  

      parent = component_build.module_build

  

      # If the macro build failed, then the module is doomed.

      if component_build.package == "module-build-macros" and state != koji.BUILD_STATES["COMPLETE"]:

          parent.transition(

+             db_session,

              config,

              state=models.BUILD_STATES["failed"],

              state_reason=state_reason,

              failure_type="user",

          )

-         session.commit()

+         db_session.commit()

          return

  

      further_work = []
@@ -96,7 +97,7 @@ 

          ]

  

          builder = module_build_service.builder.GenericBuilder.create_from_module(

-             session, parent, config

+             db_session, parent, config

          )

  

          if failed_components_in_batch:
@@ -107,12 +108,13 @@ 

              state_reason = "Component(s) {} failed to build.".format(

                  ", ".join(c.package for c in failed_components_in_batch))

              parent.transition(

+                 db_session,

                  config,

                  state=models.BUILD_STATES["failed"],

                  state_reason=state_reason,

                  failure_type="user",

              )

-             session.commit()

+             db_session.commit()

              return []

          elif not built_components_in_batch:

              # If there are no successfully built components in a batch, there is nothing to tag.
@@ -148,7 +150,7 @@ 

              if component_nvrs_to_tag_in_dest:

                  builder.tag_artifacts(component_nvrs_to_tag_in_dest)

  

-         session.commit()

+         db_session.commit()

      elif any([c.state != koji.BUILD_STATES["BUILDING"] for c in unbuilt_components_in_batch]):

          # We are not in the middle of the batch building and

          # we have some unbuilt components in this batch. We might hit the
@@ -157,19 +159,19 @@ 

          # build, try to call continue_batch_build again so in case we hit the

          # threshold previously, we will submit another build from this batch.

          builder = module_build_service.builder.GenericBuilder.create_from_module(

-             session, parent, config)

+             db_session, parent, config)

          further_work += module_build_service.utils.continue_batch_build(

-             config, parent, session, builder)

+             config, parent, db_session, builder)

      return further_work

  

  

- def complete(config, session, msg):

-     return _finalize(config, session, msg, state=koji.BUILD_STATES["COMPLETE"])

+ def complete(config, db_session, msg):

+     return _finalize(config, db_session, msg, state=koji.BUILD_STATES["COMPLETE"])

  

  

- def failed(config, session, msg):

-     return _finalize(config, session, msg, state=koji.BUILD_STATES["FAILED"])

+ def failed(config, db_session, msg):

+     return _finalize(config, db_session, msg, state=koji.BUILD_STATES["FAILED"])

  

  

- def canceled(config, session, msg):

-     return _finalize(config, session, msg, state=koji.BUILD_STATES["CANCELED"])

+ def canceled(config, db_session, msg):

+     return _finalize(config, db_session, msg, state=koji.BUILD_STATES["CANCELED"])

@@ -26,7 +26,7 @@ 

  from module_build_service.models import ModuleBuild, BUILD_STATES

  

  

- def get_corresponding_module_build(session, nvr):

+ def get_corresponding_module_build(db_session, nvr):

      """Find corresponding module build from database and return

  

-     :param session: the SQLAlchemy database session object.

+     :param db_session: the SQLAlchemy database session object.
@@ -49,16 +49,16 @@ 

          # handling Greenwave event.

          return None

  

-     return ModuleBuild.get_by_id(session, module_build_id)

+     return ModuleBuild.get_by_id(db_session, module_build_id)

  

  

- def decision_update(config, session, msg):

+ def decision_update(config, db_session, msg):

      """Move module build to ready or failed according to Greenwave result

  

      :param config: the config object returned from function :func:`init_config`,

          which is loaded from the configuration file.

      :type config: :class:`Config`

-     :param session: the SQLAlchemy database session object.

+     :param db_session: the SQLAlchemy database session object.

      :param msg: the message object representing a message received from topic

          ``greenwave.decision.update``.

      :type msg: :class:`GreenwaveDecisionUpdate`
@@ -89,7 +89,7 @@ 

          )

          return

  

-     build = get_corresponding_module_build(session, module_build_nvr)

+     build = get_corresponding_module_build(db_session, module_build_nvr)

  

      if build is None:

          log.debug(
@@ -98,6 +98,7 @@ 

  

      if build.state == BUILD_STATES["done"]:

          build.transition(

+             db_session,

              conf,

              BUILD_STATES["ready"],

              state_reason="Module build {} has satisfied Greenwave policies.".format(
@@ -112,4 +113,4 @@ 

              msg.decision_context,

          )

  

-     session.commit()

+     db_session.commit()

@@ -34,12 +34,13 @@ 

      get_rpm_release,

      generate_koji_tag,

      record_filtered_rpms,

-     record_module_build_arches,

+     record_module_build_arches

  )

  from module_build_service.errors import UnprocessableEntity, Forbidden, ValidationError

- from module_build_service.utils.ursine import handle_stream_collision_modules

  from module_build_service.utils.greenwave import greenwave

  from module_build_service.scheduler.default_modules import add_default_modules

+ from module_build_service.utils.submit import format_mmd

+ from module_build_service.utils.ursine import handle_stream_collision_modules

  

  from requests.exceptions import ConnectionError

  from module_build_service.utils import mmd_to_str
@@ -58,7 +59,7 @@ 

      return os.path.basename(srpm_path).replace(".src.rpm", "")

  

  

- def failed(config, session, msg):

+ def failed(config, db_session, msg):

      """

      Called whenever a module enters the 'failed' state.

  
@@ -66,13 +67,12 @@ 

      and stop the building.

      """

  

-     build = models.ModuleBuild.from_module_event(session, msg)

+     build = models.ModuleBuild.from_module_event(db_session, msg)

  

-     module_info = build.json()

-     if module_info["state"] != msg.module_build_state:

+     if build.state != msg.module_build_state:

          log.warning(

              "Note that retrieved module state %r doesn't match message module state %r",

-             module_info["state"], msg.module_build_state,

+             build.state, msg.module_build_state,

          )

          # This is ok.. it's a race condition we can ignore.

          pass
@@ -84,7 +84,7 @@ 

  

      if build.koji_tag:

          builder = module_build_service.builder.GenericBuilder.create_from_module(

-             session, build, config)

+             db_session, build, config)

  

          if build.new_repo_task_id:

              builder.cancel_build(build.new_repo_task_id)
@@ -94,7 +94,7 @@ 

                  builder.cancel_build(component.task_id)

              component.state = koji.BUILD_STATES["FAILED"]

              component.state_reason = build.state_reason

-             session.add(component)

+             db_session.add(component)

  

          # Tell the external buildsystem to wrap up

          builder.finalize(succeeded=False)
@@ -103,21 +103,22 @@ 

          if not build.state_reason:

              reason = "Missing koji tag. Assuming previously failed module lookup."

              log.error(reason)

-             build.transition(config, state="failed", state_reason=reason, failure_type="infra")

-             session.commit()

+             build.transition(

+                 db_session, config, state="failed", state_reason=reason, failure_type="infra")

+             db_session.commit()

              return

  

      # Don't transition it again if it's already been transitioned

      if build.state != models.BUILD_STATES["failed"]:

-         build.transition(config, state="failed", failure_type="user")

+         build.transition(db_session, config, state="failed", failure_type="user")

  

-     session.commit()

+     db_session.commit()

  

      build_logs.stop(build)

      module_build_service.builder.GenericBuilder.clear_cache(build)

  

  

- def done(config, session, msg):

+ def done(config, db_session, msg):

      """Called whenever a module enters the 'done' state.

  

      We currently don't do anything useful, so moving to ready.
@@ -125,12 +126,11 @@ 

      Otherwise the done -> ready state should happen when all

      dependent modules were re-built, at least that's the current plan.

      """

-     build = models.ModuleBuild.from_module_event(session, msg)

-     module_info = build.json()

-     if module_info["state"] != msg.module_build_state:

+     build = models.ModuleBuild.from_module_event(db_session, msg)

+     if build.state != msg.module_build_state:

          log.warning(

              "Note that retrieved module state %r doesn't match message module state %r",

-             module_info["state"], msg.module_build_state,

+             build.state, msg.module_build_state,

          )

          # This is ok.. it's a race condition we can ignore.

          pass
@@ -138,24 +138,24 @@ 

      # Scratch builds stay in 'done' state

      if not build.scratch:

          if greenwave is None or greenwave.check_gating(build):

-             build.transition(config, state="ready")

+             build.transition(db_session, config, state="ready")

          else:

              build.state_reason = "Gating failed"

              if greenwave.error_occurred:

                  build.state_reason += " (Error occured while querying Greenwave)"

              build.time_modified = datetime.utcnow()

-         session.commit()

+         db_session.commit()

  

      build_logs.stop(build)

      module_build_service.builder.GenericBuilder.clear_cache(build)

  

  

- def init(config, session, msg):

+ def init(config, db_session, msg):

      """ Called whenever a module enters the 'init' state."""

      # Sleep for a few seconds to make sure the module in the database is committed

      # TODO: Remove this once messaging is implemented in SQLAlchemy hooks

      for i in range(3):

-         build = models.ModuleBuild.from_module_event(session, msg)

+         build = models.ModuleBuild.from_module_event(db_session, msg)

          if build:

              break

          time.sleep(1)
@@ -165,15 +165,21 @@ 

      try:

          mmd = build.mmd()

          arches = [arch.name for arch in build.arches]

-         add_default_modules(session, mmd, arches)

-         record_module_build_arches(mmd, build, session)

-         record_component_builds(mmd, build, session=session)

+         add_default_modules(db_session, mmd, arches)

+         record_module_build_arches(mmd, build, db_session)

+ 

+         # Format the modulemd by putting in defaults and replacing streams that

+         # are branches with commit hashes

+         format_mmd(mmd, build.scmurl, build, db_session)

+         record_component_builds(db_session, mmd, build)

+ 

          # The ursine.handle_stream_collision_modules is Koji specific.

          if conf.system in ["koji", "test"]:

-             handle_stream_collision_modules(mmd)

-         mmd = record_filtered_rpms(mmd)

+             handle_stream_collision_modules(db_session, mmd)

+ 

+         mmd = record_filtered_rpms(db_session, mmd)

          build.modulemd = mmd_to_str(mmd)

-         build.transition(conf, models.BUILD_STATES["wait"])

+         build.transition(db_session, conf, models.BUILD_STATES["wait"])

      # Catch custom exceptions that we can expose to the user

      except (UnprocessableEntity, Forbidden, ValidationError, RuntimeError) as e:

          log.exception(str(e))
@@ -188,13 +194,14 @@ 

          error_msg = "An unknown error occurred while validating the modulemd"

          failure_reason = "user"

      else:

-         session.add(build)

-         session.commit()

+         db_session.add(build)

+         db_session.commit()

      finally:

          if error_msg:

              # Rollback changes underway

-             session.rollback()

+             db_session.rollback()

              build.transition(

+                 db_session,

                  conf,

                  models.BUILD_STATES["failed"],

                  state_reason=error_msg,
@@ -227,16 +234,17 @@ 

  @module_build_service.utils.retry(

      interval=10, timeout=120, wait_on=(ValueError, RuntimeError, ConnectionError)

  )

- def get_module_build_dependencies(build):

+ def get_module_build_dependencies(db_session, build):

      """Used by wait handler to get module's build dependencies

  

+     :param db_session: SQLAlchemy session object.

      :param build: a module build.

      :type build: :class:`ModuleBuild`

      :return: the value returned from :meth:`get_module_build_dependencies`

          according to the configured resolver.

      :rtype: dict[str, Modulemd.Module]

      """

-     resolver = module_build_service.resolver.system_resolver

+     resolver = module_build_service.resolver.GenericResolver.create(db_session, conf)

      if conf.system in ["koji", "test"]:

          # For Koji backend, query for the module we are going to

          # build to get the koji_tag and deps from it.
@@ -283,7 +291,7 @@ 

          return conf.koji_cg_default_build_tag

  

  

- def wait(config, session, msg):

+ def wait(config, db_session, msg):

      """ Called whenever a module enters the 'wait' state.

  

      We transition to this state shortly after a modulebuild is first requested.
@@ -298,14 +306,14 @@ 

      # See https://pagure.io/fm-orchestrator/issue/386

      @module_build_service.utils.retry(interval=10, timeout=120, wait_on=RuntimeError)

      def _get_build_containing_xmd_for_mbs():

-         build = models.ModuleBuild.from_module_event(session, msg)

+         build = models.ModuleBuild.from_module_event(db_session, msg)

          if "mbs" in build.mmd().get_xmd():

              return build

-         session.expire(build)

+         db_session.expire(build)

          raise RuntimeError("{!r} doesn't contain xmd information for MBS.".format(build))

  

      build = _get_build_containing_xmd_for_mbs()

-     build_logs.start(build)

+     build_logs.start(db_session, build)

  

      log.info("Found build=%r from message" % build)

      log.info("%r", build.modulemd)
@@ -319,12 +327,13 @@ 

          pass

  

      try:

-         build_deps = get_module_build_dependencies(build)

+         build_deps = get_module_build_dependencies(db_session, build)

      except ValueError:

          reason = "Failed to get module info from MBS. Max retries reached."

          log.exception(reason)

-         build.transition(config, state="failed", state_reason=reason, failure_type="infra")

-         session.commit()

+         build.transition(

+             db_session, config, state="failed", state_reason=reason, failure_type="infra")

+         db_session.commit()

          raise

  

      tag = generate_module_build_koji_tag(build)
@@ -349,7 +358,8 @@ 

              "It is disabled to tag module build during importing into Koji by Content Generator.")

          log.debug("Skip to assign Content Generator build koji tag to module build.")

  

-     builder = module_build_service.builder.GenericBuilder.create_from_module(session, build, config)

+     builder = module_build_service.builder.GenericBuilder.create_from_module(

+         db_session, build, config)

  

      log.debug(

          "Adding dependencies %s into buildroot for module %s:%s:%s",
@@ -359,9 +369,9 @@ 

  

      if not build.component_builds:

          log.info("There are no components in module %r, skipping build" % build)

-         build.transition(config, state="build")

-         session.add(build)

-         session.commit()

+         build.transition(db_session, config, state="build")

+         db_session.add(build)

+         db_session.commit()

          # Return a KojiRepoChange message so that the build can be transitioned to done

          # in the repos handler

          return [
@@ -371,22 +381,24 @@ 

  

      # If all components in module build will be reused, we don't have to build

      # module-build-macros, because there won't be any build done.

-     if attempt_to_reuse_all_components(builder, session, build):

+     if attempt_to_reuse_all_components(builder, db_session, build):

          log.info("All components have been reused for module %r, skipping build" % build)

-         build.transition(config, state="build")

-         session.add(build)

-         session.commit()

+         build.transition(db_session, config, state="build")

+         db_session.add(build)

+         db_session.commit()

          return []

  

      log.debug("Starting build batch 1")

      build.batch = 1

-     session.commit()

+     db_session.commit()

  

      artifact_name = "module-build-macros"

  

-     component_build = models.ComponentBuild.from_component_name(session, artifact_name, build.id)

+     component_build = models.ComponentBuild.from_component_name(db_session, artifact_name, build.id)

      further_work = []

-     srpm = builder.get_disttag_srpm(disttag=".%s" % get_rpm_release(build), module_build=build)

+     srpm = builder.get_disttag_srpm(

+         disttag=".%s" % get_rpm_release(db_session, build),

+         module_build=build)

      if not component_build:

          component_build = models.ComponentBuild(

              module_id=build.id,
@@ -396,10 +408,10 @@ 

              batch=1,

              build_time_only=True,

          )

-         session.add(component_build)

+         db_session.add(component_build)

          # Commit and refresh so that the SQLAlchemy relationships are available

-         session.commit()

-         session.refresh(component_build)

+         db_session.commit()

+         db_session.refresh(component_build)

          msgs = builder.recover_orphaned_artifact(component_build)

          if msgs:

              log.info("Found an existing module-build-macros build")
@@ -426,17 +438,17 @@ 

              component_build.reason = reason

              component_build.nvr = nvr

  

-     session.add(component_build)

-     build.transition(config, state="build")

-     session.add(build)

-     session.commit()

+     db_session.add(component_build)

+     build.transition(db_session, config, state="build")

+     db_session.add(build)

+     db_session.commit()

  

      # We always have to regenerate the repository.

      if config.system == "koji":

          log.info("Regenerating the repository")

          task_id = builder.koji_session.newRepo(builder.module_build_tag["name"])

          build.new_repo_task_id = task_id

-         session.commit()

+         db_session.commit()

      else:

          further_work.append(

              module_build_service.messaging.KojiRepoChange(

@@ -33,7 +33,7 @@ 

  logging.basicConfig(level=logging.DEBUG)

  

  

- def done(config, session, msg):

+ def done(config, db_session, msg):

      """ Called whenever koji rebuilds a repo, any repo. """

  

      # First, find our ModuleBuild associated with this repo, if any.
@@ -42,7 +42,7 @@ 

          log.debug("Tag %r does not end with '-build' suffix, ignoring" % tag)

          return

      tag = tag[:-6] if tag.endswith("-build") else tag

-     module_build = models.ModuleBuild.from_repo_done_event(session, msg)

+     module_build = models.ModuleBuild.from_repo_done_event(db_session, msg)

      if not module_build:

          log.debug("No module build found associated with koji tag %r" % tag)

          return
@@ -99,15 +99,16 @@ 

          state_reason = "Component(s) {} failed to build.".format(

              ", ".join(c.package for c in current_batch if c.state in failed_states))

          module_build.transition(

-             config, models.BUILD_STATES["failed"], state_reason, failure_type="infra")

-         session.commit()

+             db_session, config, models.BUILD_STATES["failed"], state_reason, failure_type="infra")

+         db_session.commit()

          log.warning("Odd!  All components in batch failed for %r." % module_build)

          return

  

      groups = module_build_service.builder.GenericBuilder.default_buildroot_groups(

-         session, module_build)

+         db_session, module_build)

  

      builder = module_build_service.builder.GenericBuilder.create(

+         db_session,

          module_build.owner,

          module_build,

          config.system,
@@ -141,7 +142,7 @@ 

  

          # Try to start the next batch build, because there are still unbuilt

          # components in the module.

-         further_work += start_next_batch_build(config, module_build, session, builder)

+         further_work += start_next_batch_build(config, module_build, db_session, builder)

  

      else:

          if has_failed_components:
@@ -151,6 +152,7 @@ 

                  )

              )

              module_build.transition(

+                 db_session,

                  config,

                  state=models.BUILD_STATES["failed"],

                  state_reason=state_reason,
@@ -161,7 +163,7 @@ 

              module_build.time_completed = datetime.utcnow()

              builder.finalize(succeeded=True)

  

-             module_build.transition(config, state=models.BUILD_STATES["done"])

-         session.commit()

+             module_build.transition(db_session, config, state=models.BUILD_STATES["done"])

+         db_session.commit()

  

      return further_work

@@ -31,20 +31,20 @@ 

  logging.basicConfig(level=logging.DEBUG)

  

  

- def tagged(config, session, msg):

+ def tagged(config, db_session, msg):

      """ Called whenever koji tags a build to tag. """

      if config.system not in ("koji", "test"):

          return []

  

      # Find our ModuleBuild associated with this tagged artifact.

      tag = msg.tag

-     module_build = models.ModuleBuild.from_tag_change_event(session, msg)

+     module_build = models.ModuleBuild.from_tag_change_event(db_session, msg)

      if not module_build:

          log.debug("No module build found associated with koji tag %r" % tag)

          return

  

      # Find tagged component.

-     component = models.ComponentBuild.from_component_nvr(session, msg.nvr, module_build.id)

+     component = models.ComponentBuild.from_component_nvr(db_session, msg.nvr, module_build.id)

      if not component:

          log.error("No component %s in module %r", msg.nvr, module_build)

          return
@@ -56,7 +56,7 @@ 

          component.tagged = True

      else:

          component.tagged_in_final = True

-     session.commit()

+     db_session.commit()

  

      unbuilt_components_in_batch = [

          c for c in module_build.current_batch()
@@ -82,7 +82,7 @@ 

      # If all components are tagged, start newRepo task.

      if not untagged_components:

          builder = module_build_service.builder.GenericBuilder.create_from_module(

-             session, module_build, config)

+             db_session, module_build, config)

  

          unbuilt_components = [

              c for c in module_build.component_builds
@@ -109,7 +109,7 @@ 

                  messaging.KojiRepoChange(

                      "components::_finalize: fake msg", builder.module_build_tag["name"])

              ]

-         session.commit()

+         db_session.commit()

  

      return further_work

  

@@ -43,25 +43,25 @@ 

      frequency = timedelta(seconds=conf.polling_interval)

  

      def poll(self):

-         with models.make_session(conf) as session:

+         with models.make_db_session(conf) as db_session:

              try:

-                 self.log_summary(session)

-                 self.process_waiting_module_builds(session)

-                 self.process_open_component_builds(session)

-                 self.fail_lost_builds(session)

-                 self.process_paused_module_builds(conf, session)

-                 self.retrigger_new_repo_on_failure(conf, session)

-                 self.delete_old_koji_targets(conf, session)

-                 self.cleanup_stale_failed_builds(conf, session)

-                 self.sync_koji_build_tags(conf, session)

-                 self.poll_greenwave(conf, session)

+                 self.log_summary(db_session)

+                 self.process_waiting_module_builds(db_session)

+                 self.process_open_component_builds(db_session)

+                 self.fail_lost_builds(db_session)

+                 self.process_paused_module_builds(conf, db_session)

+                 self.retrigger_new_repo_on_failure(conf, db_session)

+                 self.delete_old_koji_targets(conf, db_session)

+                 self.cleanup_stale_failed_builds(conf, db_session)

+                 self.sync_koji_build_tags(conf, db_session)

+                 self.poll_greenwave(conf, db_session)

              except Exception:

                  msg = "Error in poller execution:"

                  log.exception(msg)

  

          log.info('Poller will now sleep for "{}" seconds'.format(conf.polling_interval))

  

-     def fail_lost_builds(self, session):

+     def fail_lost_builds(self, db_session):

          # This function is supposed to be handling only the part which can't be

          # updated through messaging (e.g. srpm-build failures). Please keep it

          # fit `n` slim. We do want rest to be processed elsewhere
@@ -72,14 +72,15 @@ 

              koji_session = KojiModuleBuilder.get_session(conf, login=False)

              log.info("Querying tasks for statuses:")

              res = (

-                 models.ComponentBuild.query.filter_by(state=koji.BUILD_STATES["BUILDING"])

+                 db_session.query(models.ComponentBuild)

+                 .filter_by(state=koji.BUILD_STATES["BUILDING"])

                  .options(lazyload("module_build"))

                  .all()

              )

  

              log.info("Checking status for {0} tasks".format(len(res)))

              for component_build in res:

-                 log.debug(component_build.json())

+                 log.debug(component_build.json(db_session))

                  # Don't check tasks which haven't been triggered yet

                  if not component_build.task_id:

                      continue
@@ -139,15 +140,15 @@ 

          elif conf.system == "mock":

              pass

  

-     def cleanup_stale_failed_builds(self, conf, session):

+     def cleanup_stale_failed_builds(self, conf, db_session):

          """ Does various clean up tasks on stale failed module builds

          :param conf: the MBS configuration object

-         :param session: a SQLAlchemy database session

+         :param db_session: a SQLAlchemy database session

          """

          if conf.system == "koji":

              stale_date = datetime.utcnow() - timedelta(days=conf.cleanup_failed_builds_time)

              stale_module_builds = (

-                 session.query(models.ModuleBuild)

+                 db_session.query(models.ModuleBuild)

                  .filter(

                      models.ModuleBuild.state == models.BUILD_STATES["failed"],

                      models.ModuleBuild.time_modified <= stale_date,
@@ -170,35 +171,36 @@ 

                  if artifacts:

                      # Set buildroot_connect=False so it doesn't recreate the Koji target, etc.

                      builder = GenericBuilder.create_from_module(

-                         session, module, conf, buildroot_connect=False

+                         db_session, module, conf, buildroot_connect=False

                      )

                      builder.untag_artifacts([c.nvr for c in artifacts])

                      # Mark the artifacts as untagged in the database

                      for c in artifacts:

                          c.tagged = False

                          c.tagged_in_final = False

-                         session.add(c)

+                         db_session.add(c)

                  state_reason = (

                      "The module was garbage collected since it has failed over {0}"

                      " day(s) ago".format(conf.cleanup_failed_builds_time)

                  )

                  module.transition(

+                     db_session,

                      conf,

                      models.BUILD_STATES["garbage"],

                      state_reason=state_reason,

                      failure_type="user",

                  )

-                 session.add(module)

-                 session.commit()

+                 db_session.add(module)

+                 db_session.commit()

  

-     def log_summary(self, session):

+     def log_summary(self, db_session):

          log.info("Current status:")

          consumer = module_build_service.scheduler.consumer.get_global_consumer()

          backlog = consumer.incoming.qsize()

          log.info("  * internal queue backlog is {0}".format(backlog))

          states = sorted(models.BUILD_STATES.items(), key=operator.itemgetter(1))

          for name, code in states:

-             query = models.ModuleBuild.query.filter_by(state=code)

+             query = db_session.query(models.ModuleBuild).filter_by(state=code)

              count = query.count()

              if count:

                  log.info("  * {0} module builds in the {1} state".format(count, name))
@@ -210,14 +212,14 @@ 

                          n = len([c for c in module_build.component_builds if c.batch == i])

                          log.info("      * {0} components in batch {1}".format(n, i))

  

-     def _nudge_module_builds_in_state(self, session, state_name, older_than_minutes):

+     def _nudge_module_builds_in_state(self, db_session, state_name, older_than_minutes):

          """

          Finds all the module builds in the `state` with `time_modified` older

          than `older_than_minutes` and adds a fake MBSModule message to the

          work queue.

          """

          log.info("Looking for module builds stuck in the %s state", state_name)

-         builds = models.ModuleBuild.by_state(session, state_name)

+         builds = models.ModuleBuild.by_state(db_session, state_name)

          log.info(" %r module builds in the %s state...", len(builds), state_name)

          now = datetime.utcnow()

          time_modified_threshold = timedelta(minutes=older_than_minutes)
@@ -229,7 +231,6 @@ 

  

              # Pretend the build is modified, so we don't spin in a tight loop.

              build.time_modified = now

-             session.commit()

  

              # Fake a message to kickstart the build anew in the consumer

              state = module_build_service.models.BUILD_STATES[state_name]
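
The per-build `session.commit()` removed here reappears once after the loop in the next hunk, so the `time_modified` updates accumulate in the session and land in a single transaction instead of one per build. A self-contained demonstration of the same pattern on a toy model (not the MBS schema):

    from datetime import datetime
    from sqlalchemy import Column, DateTime, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class Build(Base):
        __tablename__ = "builds"
        id = Column(Integer, primary_key=True)
        time_modified = Column(DateTime)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    db_session = sessionmaker(bind=engine)()
    db_session.add_all([Build() for _ in range(3)])
    db_session.commit()

    now = datetime.utcnow()
    for build in db_session.query(Build).all():
        build.time_modified = now  # tracked by the session, not yet flushed
    db_session.commit()            # one transaction covers all updated rows
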
@@ -238,16 +239,18 @@ 

              log.info("  Scheduling faked event %r" % msg)

              module_build_service.scheduler.consumer.work_queue_put(msg)

  

-     def process_waiting_module_builds(self, session):

+         db_session.commit()

+ 

+     def process_waiting_module_builds(self, db_session):

          for state in ["init", "wait"]:

-             self._nudge_module_builds_in_state(session, state, 10)

+             self._nudge_module_builds_in_state(db_session, state, 10)

  

-     def process_open_component_builds(self, session):

+     def process_open_component_builds(self, db_session):

          log.warning("process_open_component_builds is not yet implemented...")

  

-     def process_paused_module_builds(self, config, session):

+     def process_paused_module_builds(self, config, db_session):

          log.info("Looking for paused module builds in the build state")

-         if module_build_service.utils.at_concurrent_component_threshold(config, session):

+         if module_build_service.utils.at_concurrent_component_threshold(config, db_session):

              log.debug(

                  "Will not attempt to start paused module builds due to "

                  "the concurrent build threshold being met"
@@ -259,7 +262,7 @@ 

          # builds. Exclude module builds in batch 0. This is likely a build of a module without

          # components.

          module_builds = (

-             session.query(models.ModuleBuild)

+             db_session.query(models.ModuleBuild)

              .filter(

                  models.ModuleBuild.state == models.BUILD_STATES["build"],

                  models.ModuleBuild.batch > 0,
@@ -277,20 +280,21 @@ 

              # repo-regen.

              if not module_build.current_batch(koji.BUILD_STATES["BUILDING"]):

                  # Initialize the builder...

-                 builder = GenericBuilder.create_from_module(session, module_build, config)

+                 builder = GenericBuilder.create_from_module(db_session, module_build, config)

+ 

                  if _has_missed_new_repo_message(module_build, builder.koji_session):

                      log.info("  Processing the paused module build %r", module_build)

                      further_work = module_build_service.utils.start_next_batch_build(

-                         config, module_build, session, builder)

+                         config, module_build, db_session, builder)

                      for event in further_work:

                          log.info("  Scheduling faked event %r" % event)

                          module_build_service.scheduler.consumer.work_queue_put(event)

  

              # Check if we have met the threshold.

-             if module_build_service.utils.at_concurrent_component_threshold(config, session):

+             if module_build_service.utils.at_concurrent_component_threshold(config, db_session):

                  break

  

-     def retrigger_new_repo_on_failure(self, config, session):

+     def retrigger_new_repo_on_failure(self, config, db_session):

          """

          Retrigger failed new repo tasks for module builds in the build state.

  
@@ -305,7 +309,7 @@ 

              config)

  

          for module_build in (

-             session.query(models.ModuleBuild).filter_by(state=models.BUILD_STATES["build"]).all()

+             db_session.query(models.ModuleBuild).filter_by(state=models.BUILD_STATES["build"]).all()

          ):

              if not module_build.new_repo_task_id:

                  continue
@@ -319,9 +323,9 @@ 

                  taginfo = koji_session.getTag(module_build.koji_tag + "-build")

                  module_build.new_repo_task_id = koji_session.newRepo(taginfo["name"])

  

-         session.commit()

+         db_session.commit()

  

-     def delete_old_koji_targets(self, config, session):

+     def delete_old_koji_targets(self, config, db_session):

          """

          Deletes targets older than `config.koji_target_delete_time` seconds

          from Koji to cleanup after the module builds.
@@ -336,7 +340,7 @@ 

          koji_session = KojiModuleBuilder.get_session(config)

          for target in koji_session.getBuildTargets():

              koji_tag = target["dest_tag_name"]

-             module = session.query(models.ModuleBuild).filter_by(koji_tag=koji_tag).first()

+             module = db_session.query(models.ModuleBuild).filter_by(koji_tag=koji_tag).first()

              if (

                  not module

                  or module.name in conf.base_module_names
@@ -363,7 +367,7 @@ 

                  log.info("Removing target of module %r", module)

                  koji_session.deleteBuildTarget(target["id"])

  

-     def cancel_stuck_module_builds(self, config, session):

+     def cancel_stuck_module_builds(self, config, db_session):

          """

          Method transitions builds which are stuck in one state for too long to the "failed" state.

          The states are defined with the "cleanup_stuck_builds_states" config option and the
@@ -386,7 +390,7 @@ 

          ]

  

          module_builds = (

-             session.query(models.ModuleBuild)

+             db_session.query(models.ModuleBuild)

              .filter(

                  models.ModuleBuild.state.in_(states), models.ModuleBuild.time_modified < threshold

              )
@@ -403,14 +407,15 @@ 

                  state=build.state, days=config.cleanup_stuck_builds_time

              )

              build.transition(

+                 db_session,

                  config,

                  state=models.BUILD_STATES["failed"],

                  state_reason=state_reason,

                  failure_type="user",

              )

-             session.commit()

+             db_session.commit()

  

-     def sync_koji_build_tags(self, config, session):

+     def sync_koji_build_tags(self, config, db_session):

          """

          Method checking the "tagged" and "tagged_in_final" attributes of

          "complete" ComponentBuilds in the current batch of module builds
@@ -425,7 +430,7 @@ 

          koji_session = KojiModuleBuilder.get_session(conf, login=False)

  

          threshold = datetime.utcnow() - timedelta(minutes=10)

-         module_builds = session.query(models.ModuleBuild).filter(

+         module_builds = db_session.query(models.ModuleBuild).filter(

              models.ModuleBuild.time_modified < threshold,

              models.ModuleBuild.state == models.BUILD_STATES["build"]

          ).all()
@@ -466,17 +471,17 @@ 

                      log.info("  Scheduling faked event %r" % msg)

                      module_build_service.scheduler.consumer.work_queue_put(msg)

  

-     def poll_greenwave(self, config, session):

+     def poll_greenwave(self, config, db_session):

          """

          Polls Greenwave for all builds in the done state

-         :param session: SQLAlchemy DB session

+         :param db_session: SQLAlchemy DB session

          :return: None

          """

          if greenwave is None:

              return

  

          module_builds = (

-             session.query(models.ModuleBuild)

+             db_session.query(models.ModuleBuild)

              .filter_by(state=models.BUILD_STATES["done"]).all()

          )

  
@@ -484,7 +489,7 @@ 

  

          for build in module_builds:

              if greenwave.check_gating(build):

-                 build.transition(config, state=models.BUILD_STATES["ready"])

+                 build.transition(db_session, config, state=models.BUILD_STATES["ready"])

              else:

                  build.state_reason = "Gating failed (MBS will retry in {0} seconds)".format(

                      conf.polling_interval
@@ -492,7 +497,7 @@ 

                  if greenwave.error_occurred:

                      build.state_reason += " (Error occurred while querying Greenwave)"

                  build.time_modified = datetime.utcnow()

-             session.commit()

+             db_session.commit()

  

  

  def _has_missed_new_repo_message(module_build, koji_session):
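
The Greenwave hunk above shows the call shape used for `ModuleBuild.transition` throughout this PR: the session comes first and the caller commits. A fragment illustrating the new signature (`build` is assumed to be a models.ModuleBuild loaded through `db_session`; the state reason is illustrative):

    build.transition(
        db_session,                    # the caller's session, now explicit
        conf,
        models.BUILD_STATES["ready"],
        state_reason="Gating passed",  # illustrative value
    )
    db_session.commit()                # the caller, not transition(), commits
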

@@ -22,6 +22,8 @@ 

  # Written by Ralph Bean <rbean@redhat.com>

  #            Matt Prahl <mprahl@redhat.com>

  #            Jan Kaluza <jkaluza@redhat.com>

+ 

+ import threading

  import concurrent.futures

  

  from module_build_service import conf, log, models
@@ -29,12 +31,12 @@ 

  from .reuse import get_reusable_components, reuse_component

  

  

- def at_concurrent_component_threshold(config, session):

+ def at_concurrent_component_threshold(config, db_session):

      """

      Determines if the number of concurrent component builds has reached

      the configured threshold

      :param config: Module Build Service configuration object

-     :param session: SQLAlchemy database session

+     :param db_session: SQLAlchemy database session

      :return: boolean representing if there are too many concurrent builds at

      this time

      """
@@ -57,7 +59,7 @@ 

      # just internally in MBS to be handled by

      # scheduler.handlers.components.complete.

      if config.num_concurrent_builds:

-         count = session.query(models.ComponentBuild).filter_by(

+         count = db_session.query(models.ComponentBuild).filter_by(

              state=koji.BUILD_STATES["BUILDING"], reused_component_id=None).count()

          if config.num_concurrent_builds <= count:

              return True
@@ -65,10 +67,17 @@ 

      return False

  

  

- def start_build_component(builder, c):

+ BUILD_COMPONENT_DB_SESSION_LOCK = threading.Lock()

+ 

+ 

+ def start_build_component(db_session, builder, c):

      """

      Submits single component build to builder. Called in thread

      by QueueBasedThreadPool in continue_batch_build.

+ 

+     This function runs in separate threads that share one SQLAlchemy

+     session object, which is used to mark the module build as failed

+     when the submission of one of its components to Koji goes wrong.

      """

      import koji

  
@@ -79,17 +88,21 @@ 

          c.state = koji.BUILD_STATES["FAILED"]

          c.state_reason = "Failed to build artifact %s: %s" % (c.package, str(e))

          log.exception(e)

-         c.module_build.transition(conf, models.BUILD_STATES["failed"], failure_type="infra")

+         with BUILD_COMPONENT_DB_SESSION_LOCK:

+             c.module_build.transition(

+                 db_session, conf, models.BUILD_STATES["failed"], failure_type="infra")

+             db_session.commit()

          return

  

      if not c.task_id and c.state == koji.BUILD_STATES["BUILDING"]:

          c.state = koji.BUILD_STATES["FAILED"]

          c.state_reason = "Failed to build artifact %s: Builder did not return task ID" % (c.package)

-         c.module_build.transition(conf, models.BUILD_STATES["failed"], failure_type="infra")

+         with BUILD_COMPONENT_DB_SESSION_LOCK:

+             c.module_build.transition(

+                 db_session, conf, models.BUILD_STATES["failed"], failure_type="infra")

+             db_session.commit()

          return

  

  

- def continue_batch_build(config, module, session, builder, components=None):

+ def continue_batch_build(config, module, db_session, builder, components=None):

      """

      Continues building current batch. Submits next components in the batch

      until it hits concurrent builds limit.
@@ -139,7 +152,7 @@ 

          if c.state == koji.BUILD_STATES["COMPLETE"]:

              continue

          # Check the concurrent build threshold.

-         if at_concurrent_component_threshold(config, session):

+         if at_concurrent_component_threshold(config, db_session):

              log.info("Concurrent build threshold met")

              break

  
@@ -153,7 +166,8 @@ 

      max_workers = config.num_threads_for_build_submissions

      with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:

          futures = {

-             executor.submit(start_build_component, builder, c): c for c in components_to_build

+             executor.submit(start_build_component, db_session, builder, c): c

+             for c in components_to_build

          }

          concurrent.futures.wait(futures)

          # In case there has been an exception generated directly in the
@@ -162,11 +176,11 @@ 

          for future in futures:

              future.result()

  

-     session.commit()

+     db_session.commit()

      return further_work

  

  

- def start_next_batch_build(config, module, session, builder, components=None):

+ def start_next_batch_build(config, module, db_session, builder, components=None):

      """

      Tries to start the build of next batch. In case there are still unbuilt

      components in a batch, tries to submit more components until it hits
@@ -211,7 +225,7 @@ 

      # the new one. If there is, continue building current batch.

      if has_unbuilt_components_in_batch:

          log.info("Continuing building batch %d", module.batch)

-         return continue_batch_build(config, module, session, builder, components)

+         return continue_batch_build(config, module, db_session, builder, components)

  

      # Check that there are no components in BUILDING state in current batch.

      # If there are, wait until they are built.
@@ -239,12 +253,13 @@ 

              ", ".join([str(t["id"]) for t in active_tasks])

          )

          module.transition(

+             db_session,

              config,

              state=models.BUILD_STATES["failed"],

              state_reason=state_reason,

              failure_type="infra",

          )

-         session.commit()

+         db_session.commit()

          return []

  

      else:
@@ -280,7 +295,7 @@ 

      # the new one. This can happen when resubmitting the failed module build.

      if not unbuilt_components and not components:

          log.info("Skipping build of batch %d, no component to build.", module.batch)

-         return start_next_batch_build(config, module, session, builder)

+         return start_next_batch_build(config, module, db_session, builder)

  

      log.info("Starting build of next batch %d, %s" % (module.batch, unbuilt_components))

  
@@ -297,7 +312,7 @@ 

          should_try_reuse = all_reused_in_prev_batch or prev_batch == 1

      if should_try_reuse:

          component_names = [c.package for c in unbuilt_components]

-         reusable_components = get_reusable_components(session, module, component_names)

+         reusable_components = get_reusable_components(db_session, module, component_names)

          for c, reusable_c in zip(unbuilt_components, reusable_components):

              if reusable_c:

                  components_reused = True
@@ -306,7 +321,7 @@ 

                  unbuilt_components_after_reuse.append(c)

          # Commit the changes done by reuse_component

          if components_reused:

-             session.commit()

+             db_session.commit()

  

      # If all the components were reused in the batch then make a KojiRepoChange

      # message and return
@@ -318,4 +333,4 @@ 

          return further_work

  

      return further_work + continue_batch_build(

-         config, module, session, builder, unbuilt_components_after_reuse)

+         config, module, db_session, builder, unbuilt_components_after_reuse)

@@ -290,9 +290,11 @@ 

      return validation_decorator

  

  

- def get_rpm_release(module_build):

+ def get_rpm_release(db_session, module_build):

      """

      Generates the dist tag for the specified module

+ 

+     :param db_session: SQLAlchemy session object.

      :param module_build: a models.ModuleBuild object

      :return: a string of the module's dist tag

      """
@@ -307,7 +309,7 @@ 

      # We need to share the same auto-incrementing index in dist tag between all MSE builds.

      # We can achieve that by using the lowest build ID of all the MSE siblings including

      # this module build.

-     mse_build_ids = module_build.siblings + [module_build.id or 0]

+     mse_build_ids = module_build.siblings(db_session) + [module_build.id or 0]

      mse_build_ids.sort()

      index = mse_build_ids[0]

      try:
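
The idea: every module-stream-expansion sibling has to produce the same dist tag, and the lowest build ID across the sibling set is a value they all agree on. A worked example with made-up build IDs:

    mse_build_ids = [1512, 1510] + [1511]  # siblings plus the current build
    mse_build_ids.sort()
    index = mse_build_ids[0]
    assert index == 1510  # identical for every sibling, so dist tags match
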
@@ -331,30 +333,29 @@ 

              if not module_in_xmd:

                  continue

  

-             with models.make_session(conf) as session:

-                 module_obj = models.ModuleBuild.get_build_from_nsvc(

-                     session,

-                     module,

-                     module_in_xmd["stream"],

-                     module_in_xmd["version"],

-                     module_in_xmd["context"],

-                 )

-                 if not module_obj:

-                     continue

+             module_obj = models.ModuleBuild.get_build_from_nsvc(

+                 db_session,

+                 module,

+                 module_in_xmd["stream"],

+                 module_in_xmd["version"],

+                 module_in_xmd["context"],

+             )

+             if not module_obj:

+                 continue

  

-                 try:

-                     marking = module_obj.mmd().get_xmd()["mbs"]["disttag_marking"]

-                 # We must check for a KeyError because a Variant object doesn't support the `get`

-                 # method

-                 except KeyError:

-                     if module not in conf.base_module_names:

-                         continue

-                     # If we've made it past all the modules in

-                     # conf.allowed_privileged_module_names, and the base module doesn't have

-                     # the disttag_marking set, then default to the stream of the first base module

-                     marking = module_obj.stream

-                 br_module_marking = marking + "+"

-                 break

+             try:

+                 marking = module_obj.mmd().get_xmd()["mbs"]["disttag_marking"]

+             # We must check for a KeyError because a Variant object doesn't support the `get`

+             # method

+             except KeyError:

+                 if module not in conf.base_module_names:

+                     continue

+                 # If we've made it past all the modules in

+                 # conf.allowed_privileged_module_names, and the base module doesn't have

+                 # the disttag_marking set, then default to the stream of the first base module

+                 marking = module_obj.stream

+             br_module_marking = marking + "+"

+             break

          else:

              log.warning(

                  "Module build {0} does not buildrequire a base module ({1})".format(
@@ -400,7 +401,7 @@ 

      return key_generator

  

  

- def import_mmd(session, mmd, check_buildrequires=True):

+ def import_mmd(db_session, mmd, check_buildrequires=True):

      """

      Imports the new module build defined by `mmd` into the MBS database using `db_session`.

      If it already exists, it is updated.
@@ -410,6 +411,7 @@ 

      The ModuleBuild.rebuild_strategy is set to "all".

      The ModuleBuild.owner is set to "mbs_import".

  

+     :param db_session: SQLAlchemy session object.

      :param bool check_buildrequires: When True, checks that the buildrequires defined in the MMD

          have matching records in the `mmd["xmd"]["mbs"]["buildrequires"]` and also fills in

          the `ModuleBuild.buildrequires` according to this data.
@@ -492,7 +494,7 @@ 

          log.warning("'koji_tag' is not set in xmd['mbs'] for module {}".format(nsvc))

  

      # Get the ModuleBuild from DB.

-     build = models.ModuleBuild.get_build_from_nsvc(session, name, stream, version, context)

+     build = models.ModuleBuild.get_build_from_nsvc(db_session, name, stream, version, context)

      if build:

          msg = "Updating existing module build {}.".format(nsvc)

          log.info(msg)
@@ -517,25 +519,25 @@ 

  

      # Record the base modules this module buildrequires

      if check_buildrequires:

-         for base_module in build.get_buildrequired_base_modules(session):

+         for base_module in build.get_buildrequired_base_modules(db_session):

              if base_module not in build.buildrequires:

                  build.buildrequires.append(base_module)

  

-     session.add(build)

-     session.commit()

+     db_session.add(build)

+     db_session.commit()

  

      for virtual_stream in virtual_streams:

-         vs_obj = session.query(models.VirtualStream).filter_by(name=virtual_stream).first()

+         vs_obj = db_session.query(models.VirtualStream).filter_by(name=virtual_stream).first()

          if not vs_obj:

              vs_obj = models.VirtualStream(name=virtual_stream)

-             session.add(vs_obj)

-             session.commit()

+             db_session.add(vs_obj)

+             db_session.commit()

  

          if vs_obj not in build.virtual_streams:

              build.virtual_streams.append(vs_obj)

-             session.add(build)

+             db_session.add(build)

  

-     session.commit()

+     db_session.commit()

  

      msg = "Module {} imported".format(nsvc)

      log.info(msg)
@@ -544,10 +546,11 @@ 

      return build, msgs

  

  

- def import_fake_base_module(nsvc):

+ def import_fake_base_module(db_session, nsvc):

      """

      Creates and imports new fake base module to be used with offline local builds.

  

+     :param db_session: SQLAlchemy session object.

      :param str nsvc: name:stream:version:context of a module.

      """

      name, stream, version, context = nsvc.split(":")
@@ -579,8 +582,7 @@ 

      xmd_mbs["koji_tag"] = "repofile://"

      mmd.set_xmd(xmd)

  

-     with models.make_session(conf) as session:

-         import_mmd(session, mmd, False)

+     import_mmd(db_session, mmd, False)

  

  

  def get_local_releasever():
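
With the inner `make_session` block gone, the caller owns the session and hands it down. A hypothetical caller sketch (the NSVC string is illustrative only):

    from module_build_service import conf, models

    with models.make_db_session(conf) as db_session:
        import_fake_base_module(db_session, "platform:f30:1:000000")
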
@@ -594,13 +596,14 @@ 

      return dnf_base.conf.releasever

  

  

- def import_builds_from_local_dnf_repos(platform_id=None):

+ def import_builds_from_local_dnf_repos(db_session, platform_id=None):

      """

      Imports the module builds from all available local repositories to MBS DB.

  

      This is used when building modules locally without any access to MBS infra.

      This method also generates and imports the base module according to /etc/os-release.

  

+     :param db_session: SQLAlchemy session object.

      :param str platform_id: The `name:stream` of a fake platform module to generate in this

          method. When not set, the /etc/os-release is parsed to get the PLATFORM_ID.

      """
@@ -612,30 +615,29 @@ 

      dnf_base.read_all_repos()

  

      log.info("Importing available modules to MBS local database.")

-     with models.make_session(conf) as session:

-         for repo in dnf_base.repos.values():

-             try:

-                 repo.load()

-             except Exception as e:

-                 log.warning(str(e))

-                 continue

-             mmd_data = repo.get_metadata_content("modules")

-             mmd_index = Modulemd.ModuleIndex.new()

-             ret, _ = mmd_index.update_from_string(mmd_data, True)

-             if not ret:

-                 log.warning("Loading the repo '%s' failed", repo.name)

-                 continue

- 

-             for module_name in mmd_index.get_module_names():

-                 for mmd in mmd_index.get_module(module_name).get_all_streams():

-                     xmd = mmd.get_xmd()

-                     xmd["mbs"] = {}

-                     xmd["mbs"]["koji_tag"] = "repofile://" + repo.repofile

-                     xmd["mbs"]["mse"] = True

-                     xmd["mbs"]["commit"] = "unknown"

-                     mmd.set_xmd(xmd)

- 

-                     import_mmd(session, mmd, False)

+     for repo in dnf_base.repos.values():

+         try:

+             repo.load()

+         except Exception as e:

+             log.warning(str(e))

+             continue

+         mmd_data = repo.get_metadata_content("modules")

+         mmd_index = Modulemd.ModuleIndex.new()

+         ret, _ = mmd_index.update_from_string(mmd_data, True)

+         if not ret:

+             log.warning("Loading the repo '%s' failed", repo.name)

+             continue

+ 

+         for module_name in mmd_index.get_module_names():

+             for mmd in mmd_index.get_module(module_name).get_all_streams():

+                 xmd = mmd.get_xmd()

+                 xmd["mbs"] = {}

+                 xmd["mbs"]["koji_tag"] = "repofile://" + repo.repofile

+                 xmd["mbs"]["mse"] = True

+                 xmd["mbs"]["commit"] = "unknown"

+                 mmd.set_xmd(xmd)

+ 

+                 import_mmd(db_session, mmd, False)

  

      if not platform_id:

          # Parse the /etc/os-release to find out the local platform:stream.
@@ -650,7 +652,7 @@ 

      # Create the fake platform:stream:1:000000 module to fulfill the

      # dependencies for local offline build and also to define the

      # srpm-buildroot and buildroot.

-     import_fake_base_module("%s:1:000000" % platform_id)

+     import_fake_base_module(db_session, "%s:1:000000" % platform_id)

  

  

  def get_mmd_from_scm(url):
@@ -667,10 +669,11 @@ 

      return mmd

  

  

- def get_build_arches(mmd, config):

+ def get_build_arches(db_session, mmd, config):

      """

      Returns the list of architectures for which the module `mmd` should be built.

  

+     :param db_session: SQLAlchemy session object.

      :param mmd: Module MetaData

      :param config: config (module_build_service.config.Config instance)

      :return list of architectures
@@ -713,28 +716,27 @@ 

          # Looping through all the privileged modules that are allowed to set koji tag arches

          # and the base modules to see what the koji tag arches should be. Doing it this way

          # preserves the order in the configurations.

-         with models.make_session(conf) as session:

-             for module in conf.allowed_privileged_module_names + conf.base_module_names:

-                 module_in_xmd = buildrequires.get(module)

+         for module in conf.allowed_privileged_module_names + conf.base_module_names:

+             module_in_xmd = buildrequires.get(module)

  

-                 if not module_in_xmd:

-                     continue

+             if not module_in_xmd:

+                 continue

  

-                 module_obj = models.ModuleBuild.get_build_from_nsvc(

-                     session,

-                     module,

-                     module_in_xmd["stream"],

-                     module_in_xmd["version"],

-                     module_in_xmd["context"],

-                 )

-                 if not module_obj:

-                     continue

-                 arches = module_build_service.builder.GenericBuilder.get_module_build_arches(

-                     module_obj)

-                 if arches:

-                     log.info("Setting build arches of %s to %r based on the buildrequired "

-                              "module %r." % (nsvc, arches, module_obj))

-                     return arches

+             module_obj = models.ModuleBuild.get_build_from_nsvc(

+                 db_session,

+                 module,

+                 module_in_xmd["stream"],

+                 module_in_xmd["version"],

+                 module_in_xmd["context"],

+             )

+             if not module_obj:

+                 continue

+             arches = module_build_service.builder.GenericBuilder.get_module_build_arches(

+                 module_obj)

+             if arches:

+                 log.info("Setting build arches of %s to %r based on the buildrequired "

+                          "module %r." % (nsvc, arches, module_obj))

+                 return arches

  

      # As a last resort, return just the preconfigured list of arches.

      arches = config.arches
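
Likewise, `get_build_arches` no longer opens a session of its own; callers such as `record_module_build_arches` (later in this diff) pass one in. A usage sketch, assuming `mmd` is an already-loaded Modulemd.ModuleStream:

    with models.make_db_session(conf) as db_session:
        arches = get_build_arches(db_session, mmd, conf)
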

@@ -22,20 +22,20 @@ 

  # Written by Ralph Bean <rbean@redhat.com>

  #            Matt Prahl <mprahl@redhat.com>

  #            Jan Kaluza <jkaluza@redhat.com>

- from module_build_service import log, models, Modulemd, db, conf

+ from module_build_service import log, models, Modulemd, conf

  from module_build_service.errors import StreamAmbigous

  from module_build_service.errors import UnprocessableEntity

  from module_build_service.mmd_resolver import MMDResolver

  from module_build_service.utils.general import deps_to_dict, mmd_to_str

- import module_build_service.resolver

+ from module_build_service.resolver import GenericResolver

  

  

- def _expand_mse_streams(session, name, streams, default_streams, raise_if_stream_ambigous):

+ def _expand_mse_streams(db_session, name, streams, default_streams, raise_if_stream_ambigous):

      """

      Helper method for `expand_mse_stream()` expanding single name:[streams].

      Returns list of expanded streams.

  

-     :param session: SQLAlchemy DB session.

+     :param db_session: SQLAlchemy DB session.

      :param str name: Name of the module which will be expanded.

      :param streams: List of streams to expand.

      :type streams: list[str]
@@ -62,7 +62,7 @@ 

          elif raise_if_stream_ambigous:

              raise StreamAmbigous("There are multiple streams to choose from for module %s." % name)

          else:

-             builds = models.ModuleBuild.get_last_build_in_all_streams(session, name)

+             builds = models.ModuleBuild.get_last_build_in_all_streams(db_session, name)

              expanded_streams = [build.stream for build in builds]

      else:

          expanded_streams = []
@@ -85,11 +85,11 @@ 

      return expanded_streams

  

  

- def expand_mse_streams(session, mmd, default_streams=None, raise_if_stream_ambigous=False):

+ def expand_mse_streams(db_session, mmd, default_streams=None, raise_if_stream_ambigous=False):

      """

      Expands streams in both buildrequires/requires sections of MMD.

  

-     :param session: SQLAlchemy DB session.

+     :param db_session: SQLAlchemy DB session.

      :param Modulemd.ModuleStream mmd: Modulemd metadata with original unexpanded module.

      :param dict default_streams: Dict in {module_name: module_stream, ...} format defining

          the default stream to choose for module in case when there are multiple streams to
@@ -103,7 +103,7 @@ 

          for name in deps.get_runtime_modules():

              streams = deps.get_runtime_streams(name)

              new_streams = _expand_mse_streams(

-                 session, name, streams, default_streams, raise_if_stream_ambigous)

+                 db_session, name, streams, default_streams, raise_if_stream_ambigous)

  

              if new_streams == []:

                  new_deps.set_empty_runtime_dependencies_for_module(name)
@@ -114,7 +114,7 @@ 

          for name in deps.get_buildtime_modules():

              streams = deps.get_buildtime_streams(name)

              new_streams = _expand_mse_streams(

-                 session, name, streams, default_streams, raise_if_stream_ambigous)

+                 db_session, name, streams, default_streams, raise_if_stream_ambigous)

  

              if new_streams == []:

                  new_deps.set_empty_buildtime_dependencies_for_module(name)
@@ -128,6 +128,7 @@ 

  

  

  def _get_mmds_from_requires(

+     db_session,

      requires,

      mmds,

      recursive=False,
@@ -139,6 +140,7 @@ 

      Helper method for get_mmds_required_by_module_recursively returning

      the list of module metadata objects defined by `requires` dict.

  

+     :param db_session: SQLAlchemy database session.

      :param dict requires: requires or buildrequires in the form {module: [streams]}

      :param mmds: Dictionary with already handled name:streams as a keys and lists

          of resulting mmds as values.
@@ -158,7 +160,7 @@ 

      # To be able to call itself recursively, we need to store list of mmds

      # we have added to global mmds list in this particular call.

      added_mmds = {}

-     resolver = module_build_service.resolver.system_resolver

+     resolver = GenericResolver.create(db_session, conf)

  

      for name, streams in requires.items():

          # Base modules are already added to `mmds`.
@@ -200,12 +202,12 @@ 

                  for deps in mmd.get_dependencies():

                      deps_dict = deps_to_dict(deps, 'runtime')

                      mmds = _get_mmds_from_requires(

-                         deps_dict, mmds, True, base_module_mmds=base_module_mmds)

+                         db_session, deps_dict, mmds, True, base_module_mmds=base_module_mmds)

  

      return mmds

  

  

- def _get_base_module_mmds(mmd):

+ def _get_base_module_mmds(db_session, mmd):

      """

      Returns list of MMDs of base modules buildrequired by `mmd` including the compatible

      old versions of the base module based on the stream version.
@@ -218,7 +220,7 @@ 

      seen = set()

      ret = {"ready": [], "garbage": []}

  

-     resolver = module_build_service.resolver.system_resolver

+     resolver = GenericResolver.create(db_session, conf)

      for deps in mmd.get_dependencies():

          buildrequires = {

              module: deps.get_buildtime_streams(module)
@@ -294,7 +296,7 @@ 

  

  

  def get_mmds_required_by_module_recursively(

-     mmd, default_streams=None, raise_if_stream_ambigous=False

+     db_session, mmd, default_streams=None, raise_if_stream_ambigous=False

  ):

      """

      Returns the list of Module metadata objects of all modules required while
@@ -309,6 +311,7 @@ 

      recursively all the "requires" and finds the latest version of each

      required module and also all contexts of these latest versions.

  

+     :param db_session: SQLAlchemy database session.

      :param dict default_streams: Dict in {module_name: module_stream, ...} format defining

          the default stream to choose for module in case when there are multiple streams to

          choose from.
@@ -326,7 +329,7 @@ 

      mmds = {}

  

      # Get the MMDs of all compatible base modules based on the buildrequires.

-     base_module_mmds = _get_base_module_mmds(mmd)

+     base_module_mmds = _get_base_module_mmds(db_session, mmd)

      if not base_module_mmds["ready"]:

          base_module_choices = " or ".join(conf.base_module_names)

          raise UnprocessableEntity(
@@ -350,7 +353,8 @@ 

      for deps in mmd.get_dependencies():

          deps_dict = deps_to_dict(deps, 'buildtime')

          mmds = _get_mmds_from_requires(

-             deps_dict, mmds, False, default_streams, raise_if_stream_ambigous, all_base_module_mmds)

+             db_session, deps_dict, mmds, False, default_streams, raise_if_stream_ambigous,

+             all_base_module_mmds)

  

      # Now get the requires of buildrequires recursively.

      for mmd_key in list(mmds.keys()):
@@ -358,13 +362,8 @@ 

              for deps in mmd.get_dependencies():

                  deps_dict = deps_to_dict(deps, 'runtime')

                  mmds = _get_mmds_from_requires(

-                     deps_dict,

-                     mmds,

-                     True,

-                     default_streams,

-                     raise_if_stream_ambigous,

-                     all_base_module_mmds,

-                 )

+                     db_session, deps_dict, mmds, True, default_streams,

+                     raise_if_stream_ambigous, all_base_module_mmds)

  

      # Make single list from dict of lists.

      res = []
@@ -375,13 +374,13 @@ 

      return res

  

  

- def generate_expanded_mmds(session, mmd, raise_if_stream_ambigous=False, default_streams=None):

+ def generate_expanded_mmds(db_session, mmd, raise_if_stream_ambigous=False, default_streams=None):

      """

      Returns list with MMDs with buildrequires and requires set according

      to module stream expansion rules. These module metadata can be directly

      built using MBS.

  

-     :param session: SQLAlchemy DB session.

+     :param db_session: SQLAlchemy DB session.

      :param Modulemd.ModuleStream mmd: Modulemd metadata with original unexpanded module.

      :param bool raise_if_stream_ambigous: When True, raises a StreamAmbigous exception in case

          there are multiple streams for some dependency of module and the module name is not
@@ -390,9 +389,6 @@ 

          the default stream to choose for module in case when there are multiple streams to

          choose from.

      """

-     if not session:

-         session = db.session

- 

      if not default_streams:

          default_streams = {}
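
Dropping the `db.session` fallback makes the session an explicit dependency: every caller of `generate_expanded_mmds` must now supply one. A call sketch, assuming `mmd` is the unexpanded modulemd:

    with models.make_db_session(conf) as db_session:
        mmds = generate_expanded_mmds(db_session, mmd)
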

  
@@ -404,13 +400,13 @@ 

      current_mmd.set_context(None)

  

      # Expands the MSE streams. This mainly handles '-' prefix in MSE streams.

-     expand_mse_streams(session, current_mmd, default_streams, raise_if_stream_ambigous)

+     expand_mse_streams(db_session, current_mmd, default_streams, raise_if_stream_ambigous)

  

      # Get the list of all MMDs which this module can be possibly built against

      # and add them to MMDResolver.

      mmd_resolver = MMDResolver()

      mmds_for_resolving = get_mmds_required_by_module_recursively(

-         current_mmd, default_streams, raise_if_stream_ambigous)

+         db_session, current_mmd, default_streams, raise_if_stream_ambigous)

      for m in mmds_for_resolving:

          mmd_resolver.add_modules(m)

  
@@ -523,7 +519,7 @@ 

          # Resolve the buildrequires and store the result in XMD.

          if "mbs" not in xmd:

              xmd["mbs"] = {}

-         resolver = module_build_service.resolver.system_resolver

+         resolver = GenericResolver.create(db_session, conf)

          xmd["mbs"]["buildrequires"] = resolver.resolve_requires(br_list)

          xmd["mbs"]["mse"] = True

  

@@ -74,14 +74,14 @@ 

      ]

  

  

- def get_reusable_module(session, module):

+ def get_reusable_module(db_session, module):

      """

      Returns the previous module build of the module `module` in case it can be

      used as a source module from which components can be reused.

  

      In case there is no such module, returns None.

  

-     :param session: SQLAlchemy database session

+     :param db_session: SQLAlchemy database session

      :param module: the ModuleBuild object of module being built.

      :return: ModuleBuild object which can be used for component reuse.

      """
@@ -92,7 +92,7 @@ 

      mmd = module.mmd()

      # Find the latest module that is in the done or ready state

      previous_module_build = (

-         session.query(models.ModuleBuild)

+         db_session.query(models.ModuleBuild)

          .filter_by(name=mmd.get_module_name())

          .filter_by(stream=mmd.get_stream_name())

          .filter_by(state=models.BUILD_STATES["ready"])
@@ -115,12 +115,12 @@ 

          return None

  

      module.reused_module_id = previous_module_build.id

-     session.commit()

+     db_session.commit()

  

      return previous_module_build

  

  

- def attempt_to_reuse_all_components(builder, session, module):

+ def attempt_to_reuse_all_components(builder, db_session, module):

      """

      Tries to reuse all the components in a build. The components are also

      tagged to the tags using the `builder`.
@@ -129,7 +129,7 @@ 

      False is returned, no component has been reused.

      """

  

-     previous_module_build = get_reusable_module(session, module)

+     previous_module_build = get_reusable_module(db_session, module)

      if not previous_module_build:

          return False

  
@@ -145,7 +145,7 @@ 

          if c.package == "module-build-macros":

              continue

          component_to_reuse = get_reusable_component(

-             session,

+             db_session,

              module,

              c.package,

              previous_module_build=previous_module_build,
@@ -177,7 +177,7 @@ 

      return True

  

  

- def get_reusable_components(session, module, component_names, previous_module_build=None):

+ def get_reusable_components(db_session, module, component_names, previous_module_build=None):

      """

      Returns the list of ComponentBuild instances belonging to previous module

      build which can be reused in the build of module `module`.
@@ -188,7 +188,7 @@ 

      In case some component cannot be reused, None is used instead of a

      ComponentBuild instance in the returned list.

  

-     :param session: SQLAlchemy database session

+     :param db_session: SQLAlchemy database session

      :param module: the ModuleBuild object of module being built.

      :param component_names: List of component names to be reused.

      :kwarg previous_module_build: the ModuleBuild instance of a module build
@@ -202,7 +202,7 @@ 

          return [None] * len(component_names)

  

      if not previous_module_build:

-         previous_module_build = get_reusable_module(session, module)

+         previous_module_build = get_reusable_module(db_session, module)

      if not previous_module_build:

          return [None] * len(component_names)

  
@@ -213,19 +213,19 @@ 

      for component_name in component_names:

          ret.append(

              get_reusable_component(

-                 session, module, component_name, previous_module_build, mmd, old_mmd)

+                 db_session, module, component_name, previous_module_build, mmd, old_mmd)

          )

  

      return ret

  

  

  def get_reusable_component(

-     session, module, component_name, previous_module_build=None, mmd=None, old_mmd=None

+     db_session, module, component_name, previous_module_build=None, mmd=None, old_mmd=None

  ):

      """

      Returns the component (RPM) build of a module that can be reused

      instead of needing to rebuild it

-     :param session: SQLAlchemy database session

+     :param db_session: SQLAlchemy database session

      :param module: the ModuleBuild object of module being built with a formatted

          mmd

      :param component_name: the name of the component (RPM) that you'd like to
@@ -256,7 +256,7 @@ 

          return None

  

      if not previous_module_build:

-         previous_module_build = get_reusable_module(session, module)

+         previous_module_build = get_reusable_module(db_session, module)

          if not previous_module_build:

              return None

  
@@ -269,7 +269,7 @@ 

      # or the ref is missing, something has gone wrong and the component cannot

      # be reused

      new_module_build_component = models.ComponentBuild.from_component_name(

-         session, component_name, module.id)

+         db_session, component_name, module.id)

      if (

          not new_module_build_component

          or not new_module_build_component.batch
@@ -279,7 +279,7 @@ 

          return None

  

      prev_module_build_component = models.ComponentBuild.from_component_name(

-         session, component_name, previous_module_build.id

+         db_session, component_name, previous_module_build.id

      )

      # If the component to reuse for some reason was not found in the database,

      # or the ref is missing, something has gone wrong and the component cannot
@@ -375,7 +375,7 @@ 

              log.info("Cannot re-use. Architectures are different for package: %s." % pkg_name)

              return None

  

-     reusable_component = models.ComponentBuild.query.filter_by(

+     reusable_component = db_session.query(models.ComponentBuild).filter_by(

          package=component_name, module_id=previous_module_build.id).one()

      log.debug("Found reusable component!")

      return reusable_component

@@ -38,7 +38,7 @@ 

  from gi.repository import GLib

  

  import module_build_service.scm

- from module_build_service import conf, db, log, models, Modulemd

+ from module_build_service import conf, log, models, Modulemd

  from module_build_service.errors import ValidationError, UnprocessableEntity, Forbidden, Conflict

  from module_build_service.utils import (

      to_text_type, deps_to_dict, mmd_to_str, load_mmd, load_mmd_file,
@@ -46,29 +46,29 @@ 

  )

  

  

- def record_module_build_arches(mmd, build, session):

+ def record_module_build_arches(mmd, build, db_session):

      """

      Finds out the list of build arches against which the ModuleBuild `build` should be built

      and records them to `build.arches`.

  

      :param Modulemd mmd: The MMD file associated with a ModuleBuild.

      :param ModuleBuild build: The ModuleBuild.

-     :param session: Database session.

+     :param db_session: Database session.

      """

-     arches = get_build_arches(mmd, conf)

+     arches = get_build_arches(db_session, mmd, conf)

      for arch in arches:

-         arch_obj = session.query(models.ModuleArch).filter_by(name=arch).first()

+         arch_obj = db_session.query(models.ModuleArch).filter_by(name=arch).first()

          if not arch_obj:

              arch_obj = models.ModuleArch(name=arch)

-             session.add(arch_obj)

-             session.commit()

+             db_session.add(arch_obj)

+             db_session.commit()

  

          if arch_obj not in build.arches:

              build.arches.append(arch_obj)

-             session.add(build)

+             db_session.add(build)

  

  

- def record_filtered_rpms(mmd):

+ def record_filtered_rpms(db_session, mmd):

      """Record filtered RPMs that should not be installed into buildroot

  

      These RPMs are filtered:
@@ -76,6 +76,7 @@ 

      * Reads the mmd["xmd"]["buildrequires"] and extends it with "filtered_rpms"

        list containing the NVRs of filtered RPMs in a buildrequired module.

  

+     :param db_session: SQLAlchemy session object.

      :param Modulemd mmd: Modulemd that will be built next.

      :rtype: Modulemd.Module

      :return: Modulemd extended with the "filtered_rpms" in XMD section.
@@ -84,7 +85,7 @@ 

      from module_build_service.builder import GenericBuilder

      from module_build_service.resolver import GenericResolver

  

-     resolver = GenericResolver.create(conf)

+     resolver = GenericResolver.create(db_session, conf)

      builder = GenericBuilder.backends[conf.system]

  

      new_buildrequires = {}
@@ -141,7 +142,7 @@ 

      return {"pkg_name": pkg.get_name(), "pkg_ref": pkgref, "error": None}

  

  

- def format_mmd(mmd, scmurl, module=None, session=None):

+ def format_mmd(mmd, scmurl, module=None, db_session=None):

      """

      Prepares the modulemd for the MBS. This does things such as replacing the

      branches of components with commit hashes and adding metadata in the xmd
@@ -150,7 +151,7 @@ 

      :param scmurl: the url to the modulemd

      :param module: When specified together with `db_session`, the time_modified

          of a module is updated regularly in case this method takes a lot of time.

-     :param session: Database session to update the `module`.

+     :param db_session: Database session to update the `module`.

      """

      # Import it here, because SCM uses utils methods and fails to import

      # them because of dep-chain.
@@ -241,9 +242,9 @@ 

              # that module is stuck in "init" state and it would send fake "init" message.

              while not async_result.ready():

                  async_result.wait(60)

-                 if module and session:

+                 if module and db_session:

                      module.time_modified = datetime.utcnow()

-                     session.commit()

+                     db_session.commit()

              pkg_dicts = async_result.get()

          finally:

              pool.close()
@@ -418,18 +419,11 @@ 

  

  

  def record_component_builds(

-     mmd, module, initial_batch=1, previous_buildorder=None, main_mmd=None, session=None

+     db_session, mmd, module, initial_batch=1, previous_buildorder=None, main_mmd=None

  ):

      # Imported here to allow import of utils in GenericBuilder.

      import module_build_service.builder

  

-     if not session:

-         session = db.session

- 

-     # Format the modulemd by putting in defaults and replacing streams that

-     # are branches with commit hashes

-     format_mmd(mmd, module.scmurl, module, session)

- 

      # When main_mmd is set, merge the metadata from this mmd to main_mmd,

      # otherwise our current mmd is main_mmd.

      if main_mmd:
@@ -494,8 +488,9 @@ 

              # It is OK to whitelist all URLs here, because the validity

              # of every URL has already been checked in format_mmd(...).

              included_mmd = _fetch_mmd(full_url, whitelist_url=True)[0]

+             format_mmd(included_mmd, module.scmurl, module, db_session)

              batch = record_component_builds(

-                 included_mmd, module, batch, previous_buildorder, main_mmd, session=session)

+                 db_session, included_mmd, module, batch, previous_buildorder, main_mmd)

              continue

  

          package = component.get_name()
@@ -509,7 +504,7 @@ 

  

          # Skip the ComponentBuild if it already exists in database. This can happen

          # in case of module build resubmition.

-         existing_build = models.ComponentBuild.from_component_name(db.session, package, module.id)

+         existing_build = models.ComponentBuild.from_component_name(db_session, package, module.id)

          if existing_build:

              # Check that the existing build has the same most important attributes.

              # This should never be a problem, but it's good to be defensive here so
@@ -534,12 +529,14 @@ 

              ref=component_ref,

              weight=rpm_weights[package],

          )

-         session.add(build)

+         db_session.add(build)

  

      return batch

  

  

- def submit_module_build_from_yaml(username, handle, params, stream=None, skiptests=False):

+ def submit_module_build_from_yaml(

+     db_session, username, handle, params, stream=None, skiptests=False

+ ):

      yaml_file = to_text_type(handle.read())

      mmd = load_mmd(yaml_file)

      dt = datetime.utcfromtimestamp(int(time.time()))
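
Both submission entry points now thread the session through from the caller. A hypothetical view/CLI-side sketch (the file name and username are illustrative):

    from module_build_service import conf, models

    with models.make_db_session(conf) as db_session:
        with open("testmodule.yaml") as handle:
            builds = submit_module_build_from_yaml(
                db_session, "jdoe", handle, params={})
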
@@ -562,13 +559,13 @@ 

          macros = buildopts.get_rpm_macros() or ""

          buildopts.set_rpm_macros(macros + "\n\n%__spec_check_pre exit 0\n")

          mmd.set_buildopts(buildopts)

-     return submit_module_build(username, mmd, params)

+     return submit_module_build(db_session, username, mmd, params)

  

  

  _url_check_re = re.compile(r"^[^:/]+:.*$")

  

  

- def submit_module_build_from_scm(username, params, allow_local_url=False):

+ def submit_module_build_from_scm(db_session, username, params, allow_local_url=False):

      url = params["scmurl"]

      branch = params["branch"]

      # Translate local paths into file:// URL
@@ -578,7 +575,7 @@ 

          url = "file://" + url

      mmd, scm = _fetch_mmd(url, branch, allow_local_url)

  

-     return submit_module_build(username, mmd, params)

+     return submit_module_build(db_session, username, mmd, params)

  

  

  def _apply_dep_overrides(mmd, params):
@@ -685,7 +682,7 @@ 

              )

  

  

- def _modify_buildtime_streams(mmd, new_streams_func):

+ def _modify_buildtime_streams(db_session, mmd, new_streams_func):

      """

      Modify buildtime streams using the input new_streams_func.

  
@@ -702,7 +699,7 @@ 

          new_dep = Modulemd.Dependencies()

  

          for name, streams in brs.items():

-             new_streams = new_streams_func(name, streams)

+             new_streams = new_streams_func(db_session, name, streams)

              if streams != new_streams:

                  overridden = True

  
@@ -726,7 +723,7 @@ 

              mmd.add_dependencies(new_dep)

  

  

- def resolve_base_module_virtual_streams(name, streams):

+ def resolve_base_module_virtual_streams(db_session, name, streams):

      """

      Resolve any base module virtual streams and return a copy of `streams`

      with the resolved values.

  
@@ -735,7 +732,8 @@ 

      :return: the resolved streams

      :rtype: list

      """

-     from module_build_service.resolver import system_resolver

+     from module_build_service.resolver import GenericResolver

+     resolver = GenericResolver.create(db_session, conf)

  

      if name not in conf.base_module_names:

          return streams
@@ -749,7 +747,7 @@ 

  

          # Check if the base module stream is available

          log.debug('Checking to see if the base module "%s:%s" is available', name, stream)

-         if system_resolver.get_module_count(name=name, stream=stream) > 0:

+         if resolver.get_module_count(name=name, stream=stream) > 0:

              continue

  

          # If the base module stream is not available, check if there's a virtual stream
@@ -757,7 +755,7 @@ 

              'Checking to see if there is a base module "%s" with the virtual stream "%s"',

              name, stream,

          )

-         base_module_mmd = system_resolver.get_latest_with_virtual_stream(

+         base_module_mmd = resolver.get_latest_with_virtual_stream(

              name=name, virtual_stream=stream

          )

          if not base_module_mmd:
@@ -780,7 +778,7 @@ 

      return new_streams

  

  

- def _process_support_streams(mmd, params):

+ def _process_support_streams(db_session, mmd, params):

      """

      Check if any buildrequired base modules require a support stream suffix.

  
@@ -803,7 +801,7 @@ 

  

      buildrequire_overrides = params.get("buildrequire_overrides", {})

  

-     def new_streams_func(name, streams):

+     def new_streams_func(db_session, name, streams):

          if name not in conf.base_module_names:

              log.debug("The module %s is not a base module. Skipping the release date check.", name)

              return streams
@@ -896,13 +894,14 @@ 

  

          return new_streams

  

-     _modify_buildtime_streams(mmd, new_streams_func)

+     _modify_buildtime_streams(db_session, mmd, new_streams_func)

  

  

- def submit_module_build(username, mmd, params):

+ def submit_module_build(db_session, username, mmd, params):

      """

      Submits new module build.

  

+     :param db_session: SQLAlchemy session object.

      :param str username: Username of the build's owner.

      :param Modulemd.ModuleStream mmd: Modulemd defining the build.

      :param dict params: the API parameters passed in by the user
@@ -931,10 +930,10 @@ 

      if "default_streams" in params:

          default_streams = params["default_streams"]

      _apply_dep_overrides(mmd, params)

-     _modify_buildtime_streams(mmd, resolve_base_module_virtual_streams)

-     _process_support_streams(mmd, params)

+     _modify_buildtime_streams(db_session, mmd, resolve_base_module_virtual_streams)

+     _process_support_streams(db_session, mmd, params)

  

-     mmds = generate_expanded_mmds(db.session, mmd, raise_if_stream_ambigous, default_streams)

+     mmds = generate_expanded_mmds(db_session, mmd, raise_if_stream_ambigous, default_streams)

      if not mmds:

          raise ValidationError(

              "No dependency combination was satisfied. Please verify the "
@@ -954,7 +953,7 @@ 

          nsvc = mmd.get_nsvc()

  

          log.debug("Checking whether module build already exists: %s.", nsvc)

-         module = models.ModuleBuild.get_build_from_nsvc(db.session, *nsvc.split(":"))

+         module = models.ModuleBuild.get_build_from_nsvc(db_session, *nsvc.split(":"))

          if module and not params.get("scratch", False):

              if module.state != models.BUILD_STATES["failed"]:

                  log.info(
@@ -977,7 +976,7 @@ 

                  if component.state and component.state != koji.BUILD_STATES["COMPLETE"]:

                      component.state = None

                      component.state_reason = None

-                     db.session.add(component)

+                     db_session.add(component)

              module.username = username

              prev_state = module.previous_non_failed_state

              if prev_state == models.BUILD_STATES["init"]:
@@ -985,7 +984,7 @@ 

              else:

                  transition_to = models.BUILD_STATES["wait"]

                  module.batch = 0

-             module.transition(conf, transition_to, "Resubmitted by %s" % username)

+             module.transition(db_session, conf, transition_to, "Resubmitted by %s" % username)

              log.info("Resumed existing module build in previous state %s" % module.state)

          else:

              # make NSVC unique for every scratch build
@@ -993,7 +992,7 @@ 

              if params.get("scratch", False):

                  log.debug("Checking for existing scratch module builds by NSVC")

                  scrmods = models.ModuleBuild.get_scratch_builds_from_nsvc(

-                     db.session, *nsvc.split(":"))

+                     db_session, *nsvc.split(":"))

                  scrmod_contexts = [scrmod.context for scrmod in scrmods]

                  log.debug(

                      "Found %d previous scratch module build context(s): %s",
@@ -1005,7 +1004,7 @@ 

  

              log.debug("Creating new module build")

              module = models.ModuleBuild.create(

-                 db.session,

+                 db_session,

                  conf,

                  name=mmd.get_module_name(),

                  stream=mmd.get_stream_name(),
@@ -1027,8 +1026,8 @@ 

              module.context += context_suffix

  

          all_modules_skipped = False

-         db.session.add(module)

-         db.session.commit()

+         db_session.add(module)

+         db_session.commit()

          modules.append(module)

          log.info('The user "%s" submitted the build "%s"', username, nsvc)

  
@@ -1135,20 +1134,18 @@ 

      return mmd, scm

  

  

- def load_local_builds(local_build_nsvs, session=None):

+ def load_local_builds(db_session, local_build_nsvs):

      """

      Loads previously finished local module builds from conf.mock_resultsdir

      and imports them into the database.

  

+     :param db_session: SQLAlchemy session object.

      :param local_build_nsvs: list of NSV strings (name:stream:version) defining the modules

          to load from the mock_resultsdir.

      """

      if not local_build_nsvs:

          return

  

-     if not session:

-         session = db.session

- 

      if type(local_build_nsvs) != list:

          local_build_nsvs = [local_build_nsvs]

  
@@ -1206,7 +1203,7 @@ 

  

          # Create ModuleBuild in database.

          module = models.ModuleBuild.create(

-             session,

+             db_session,

              conf,

              name=mmd.get_module_name(),

              stream=mmd.get_stream_name(),
@@ -1219,7 +1216,7 @@ 

          )

          module.koji_tag = path

          module.state = models.BUILD_STATES["ready"]

-         session.commit()

+         db_session.commit()

  

          if (

              found_build[0] != module.name

@@ -24,7 +24,7 @@ 

  import re

  

  from module_build_service import conf, log

- from module_build_service.resolver import system_resolver

+ from module_build_service.resolver import GenericResolver

  

  

  """
@@ -107,7 +107,7 @@ 

      ]

  

  

- def get_modulemds_from_ursine_content(tag):

+ def get_modulemds_from_ursine_content(db_session, tag):

      """Get all modules metadata which were added to ursine content

  

      Ursine content is the tag inheritance managed by Ursa-Major by adding
@@ -124,6 +124,7 @@ 

      So, this function finds all module koji_tags from the build tag

      and returns the corresponding module metadata.

  

+     :param db_session: SQLAlchemy database session.

      :param str tag: a base module's koji_tag.

      :return: list of module metadata. An empty list is returned if no ursine

          module metadata is found.
@@ -131,6 +132,8 @@ 

      """

      from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  

+     resolver = GenericResolver.create(db_session, conf)

+ 

      koji_session = KojiModuleBuilder.get_session(conf, login=False)

      repos = koji_session.getExternalRepoList(tag)

      build_tags = find_build_tags_from_external_repos(koji_session, repos)
@@ -141,7 +144,7 @@ 

      for tag in build_tags:

          koji_tags = find_module_koji_tags(koji_session, tag)

          for koji_tag in koji_tags:

-             md = system_resolver.get_modulemd_by_koji_tag(koji_tag)

+             md = resolver.get_modulemd_by_koji_tag(koji_tag)

              if md:

                  modulemds.append(md)

              else:
@@ -149,11 +152,12 @@ 

      return modulemds

  

  

- def find_stream_collision_modules(buildrequired_modules, koji_tag):

+ def find_stream_collision_modules(db_session, buildrequired_modules, koji_tag):

      """

      Find buildrequired modules that are part of the ursine content represented

      by the koji_tag but with a different stream.

  

+     :param db_session: SQLAlchemy database session.

      :param dict buildrequired_modules: a mapping of buildrequires, which is just

          the ``xmd/mbs/buildrequires``. This mapping is used to determine if a module

          found from ursine content is a buildrequire with a different stream.
@@ -164,7 +168,7 @@ 

          found, an empty list is returned.

      :rtype: list[str]

      """

-     ursine_modulemds = get_modulemds_from_ursine_content(koji_tag)

+     ursine_modulemds = get_modulemds_from_ursine_content(db_session, koji_tag)

      if not ursine_modulemds:

          log.debug("No module metadata is found from ursine content.")

          return []
@@ -193,7 +197,7 @@ 

      return collision_modules

  

  

- def handle_stream_collision_modules(mmd):

+ def handle_stream_collision_modules(db_session, mmd):

      """

      Find modules from ursine content and record those that are buildrequired

      modules but have a different stream. Finally, record built RPMs of these
@@ -212,6 +216,7 @@ 

      which is a list of NSVC strings. Each of them is a module added to ursine

      content by Ursa-Major.

  

+     :param db_session: SQLAlchemy database session.

      :param mmd: a module's metadata which will be built.

      :type mmd: Modulemd.Module

      """
@@ -243,13 +248,14 @@ 

              )

              continue

  

-         modules_nsvc = find_stream_collision_modules(buildrequires, base_module_info["koji_tag"])

+         modules_nsvc = find_stream_collision_modules(

+             db_session, buildrequires, base_module_info["koji_tag"])

  

          if modules_nsvc:

              # Save modules NSVC for later use in subsequent event handlers to

              # log readable messages.

              base_module_info["stream_collision_modules"] = modules_nsvc

-             base_module_info["ursine_rpms"] = find_module_built_rpms(modules_nsvc)

+             base_module_info["ursine_rpms"] = find_module_built_rpms(db_session, modules_nsvc)

          else:

              log.info("No stream collision module is found against base module %s.", module_name)

              # Always set in order to mark it as handled already.
@@ -259,9 +265,10 @@ 

      mmd.set_xmd(xmd)

  

  

- def find_module_built_rpms(modules_nsvc):

+ def find_module_built_rpms(db_session, modules_nsvc):

      """Find out built RPMs of given modules

  

+     :param db_session: SQLAlchemy database session.

      :param modules_nsvc: a list of module NSVCs for which to find the built

          RPMs.

      :type modules_nsvc: list[str]
@@ -269,10 +276,9 @@ 

      :rtype: list[str]

      """

      import kobo.rpmlib

-     from module_build_service.resolver import GenericResolver

      from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  

-     resolver = GenericResolver.create(conf)

+     resolver = GenericResolver.create(db_session, conf)

  

      built_rpms = []

      koji_session = KojiModuleBuilder.get_session(conf, login=False)

file modified
+19 -7
@@ -119,6 +119,9 @@ 

              elif short_flag == "true" or short_flag == "1":

                  if hasattr(p_query.items[0], "short_json"):

                      json_func_name = "short_json"

+             if json_func_name == "json" or json_func_name == "extended_json":

+                 # Only ModuleBuild.json and ModuleBuild.extended_json accept the db_session argument

+                 json_func_kwargs["db_session"] = db.session

              json_data["items"] = [

                  getattr(item, json_func_name)(**json_func_kwargs) for item in p_query.items

              ]
@@ -135,6 +138,9 @@ 

                  elif short_flag == "true" or short_flag == "1":

                      if getattr(instance, "short_json", None):

                          json_func_name = "short_json"

+                 if json_func_name == "json" or json_func_name == "extended_json":

+                     # Only ModuleBuild.json and ModuleBuild.extended_json accept the db_session argument

+                     json_func_kwargs["db_session"] = db.session

                  return jsonify(getattr(instance, json_func_name)(**json_func_kwargs)), 200

              else:

                  raise NotFound("No such %s found." % self.kind)
@@ -177,9 +183,9 @@ 

          modules = handler.post()

          if api_version == 1:

              # Only show the first module build for backwards-compatibility

-             rv = modules[0].extended_json(True, api_version)

+             rv = modules[0].extended_json(db.session, True, api_version)

          else:

-             rv = [module.extended_json(True, api_version) for module in modules]

+             rv = [module.extended_json(db.session, True, api_version) for module in modules]

          return jsonify(rv), 201

  

      @validate_api_version()
@@ -217,14 +223,15 @@ 

              raise Forbidden("You can't cancel a failed module")

  

          if r["state"] == "failed" or r["state"] == str(models.BUILD_STATES["failed"]):

-             module.transition(conf, models.BUILD_STATES["failed"], "Canceled by %s." % username)

+             module.transition(

+                 db.session, conf, models.BUILD_STATES["failed"], "Canceled by %s." % username)

          else:

              log.error('The provided state change of "{}" is not supported'.format(r["state"]))

              raise ValidationError("The provided state change is not supported")

          db.session.add(module)

          db.session.commit()

  

-         return jsonify(module.extended_json(True, api_version)), 200

+         return jsonify(module.extended_json(db.session, True, api_version)), 200

  

  

  class AboutAPI(MethodView):
@@ -291,7 +298,10 @@ 

  

          mmd = get_mmd_from_scm(handler.data["scmurl"])

          build, messages = import_mmd(db.session, mmd)

-         json_data = {"module": build.json(show_tasks=False), "messages": messages}

+         json_data = {

+             "module": build.json(db.session, show_tasks=False),

+             "messages": messages

+         }

  

          # return 201 Created if we reach this point

          return jsonify(json_data), 201
@@ -442,7 +452,8 @@ 

              self.validate_optional_params()

  

      def post(self):

-         return submit_module_build_from_scm(self.username, self.data, allow_local_url=False)

+         return submit_module_build_from_scm(

+             db.session, self.username, self.data, allow_local_url=False)

  

  

  class YAMLFileHandler(BaseHandler):
@@ -467,7 +478,8 @@ 

                  handle.filename = self.data["module_name"]

          else:

              handle = request.files["yaml"]

-         return submit_module_build_from_yaml(self.username, handle, self.data)

+         return submit_module_build_from_yaml(

+             db.session, self.username, handle, self.data)

  

  

  def _dict_from_request(request):

file modified
+116 -383
@@ -34,7 +34,7 @@ 

  from module_build_service.utils import get_rpm_release, import_mmd, mmd_to_str

  from module_build_service.config import init_config

  from module_build_service.models import (

-     ModuleBuild, ComponentBuild, VirtualStream, make_session, BUILD_STATES,

+     ModuleBuild, ComponentBuild, VirtualStream, make_db_session, BUILD_STATES,

  )

  from module_build_service import Modulemd

  
@@ -110,6 +110,17 @@ 

  

  

  def clean_database(add_platform_module=True, add_default_arches=True):

+     """Initialize the test database

+ 

+     This function drops all data in the database and recreates all tables

+     from scratch.

+ 

+     Note that this function relies on database objects managed by

+     Flask-SQLAlchemy.

+     """

+     # Ensure all pending transactions are committed and do not block subsequent

+     # DML on tables.

+     # TODO: should the code that forgets to commit be fixed instead?

      db.session.commit()

      db.drop_all()

      db.create_all()
@@ -153,7 +164,7 @@ 

              # Just to possibly confuse tests by adding another base module.

              mmd = mmd.copy("bootstrap", stream)

              import_mmd(db.session, mmd)

-     with make_session(conf) as db_session:

+     with make_db_session(conf) as db_session:

          _populate_data(db_session, data_size, contexts=contexts, scratch=scratch)

  

  
@@ -203,34 +214,35 @@ 

  

              db_session.add(build_one)

              db_session.commit()

-             build_one_component_release = get_rpm_release(build_one)

  

-             db_session.add(ComponentBuild(

-                 package="nginx",

-                 scmurl="git://pkgs.domain.local/rpms/nginx?"

-                        "#ga95886c8a443b36a9ce31abda1f9bed22f2f8c3",

-                 format="rpms",

-                 task_id=12312345 + index,

-                 state=koji.BUILD_STATES["COMPLETE"],

-                 nvr="nginx-1.10.1-2.{0}".format(build_one_component_release),

-                 batch=1,

-                 module_id=2 + index * 3,

-                 tagged=True,

-                 tagged_in_final=True,

-             ))

-             db_session.add(ComponentBuild(

-                 package="module-build-macros",

-                 scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"

-                        "module-build-macros-0.1-1.module_nginx_1_2.src.rpm",

-                 format="rpms",

-                 task_id=12312321 + index,

-                 state=koji.BUILD_STATES["COMPLETE"],

-                 nvr="module-build-macros-01-1.{0}".format(build_one_component_release),

-                 batch=2,

-                 module_id=2 + index * 3,

-                 tagged=True,

-                 tagged_in_final=True,

-             ))

+             build_one_component_release = get_rpm_release(db_session, build_one)

+ 

+             db_session.add_all([

+                 ComponentBuild(

+                     package="nginx",

+                     scmurl="git://pkgs.domain.local/rpms/nginx?"

+                            "#ga95886c8a443b36a9ce31abda1f9bed22f2f8c3",

+                     format="rpms",

+                     task_id=12312345 + index,

+                     state=koji.BUILD_STATES["COMPLETE"],

+                     nvr="nginx-1.10.1-2.{0}".format(build_one_component_release),

+                     batch=1,

+                     module_id=2 + index * 3,

+                     tagged=True,

+                     tagged_in_final=True),

+                 ComponentBuild(

+                     package="module-build-macros",

+                     scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"

+                            "module-build-macros-0.1-1.module_nginx_1_2.src.rpm",

+                     format="rpms",

+                     task_id=12312321 + index,

+                     state=koji.BUILD_STATES["COMPLETE"],

+                     nvr="module-build-macros-01-1.{0}".format(build_one_component_release),

+                     batch=2,

+                     module_id=2 + index * 3,

+                     tagged=True,

+                     tagged_in_final=True)

+             ])

              db_session.commit()
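
Side note on the add_all refactor above: SQLAlchemy's Session.add_all is just a batched form of Session.add, so the two spellings below are equivalent; the list form reads better for fixture data:

    # equivalent ways to stage several ORM objects with SQLAlchemy
    for component in components:
        db_session.add(component)
    # ... is the same as:
    db_session.add_all(components)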

  

          build_two = ModuleBuild(
@@ -255,33 +267,32 @@ 

          db_session.add(build_two)

          db_session.commit()

  

-         build_two_component_release = get_rpm_release(build_two)

- 

-         db_session.add(ComponentBuild(

-             package="postgresql",

-             scmurl="git://pkgs.domain.local/rpms/postgresql"

-                    "?#dc95586c4a443b26a9ce38abda1f9bed22f2f8c3",

-             format="rpms",

-             task_id=2433433 + index,

-             state=koji.BUILD_STATES["COMPLETE"],

-             nvr="postgresql-9.5.3-4.{0}".format(build_two_component_release),

-             batch=2,

-             module_id=3 + index * 3,

-             tagged=True,

-             tagged_in_final=True,

-         ))

-         db_session.add(ComponentBuild(

-             package="module-build-macros",

-             scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"

-                    "module-build-macros-0.1-1.module_postgresql_1_2.src.rpm",

-             format="rpms",

-             task_id=47383993 + index,

-             state=koji.BUILD_STATES["COMPLETE"],

-             nvr="module-build-macros-01-1.{0}".format(build_two_component_release),

-             batch=1,

-             module_id=3 + index * 3,

-         ))

+         build_two_component_release = get_rpm_release(db_session, build_two)

  

+         db_session.add_all([

+             ComponentBuild(

+                 package="postgresql",

+                 scmurl="git://pkgs.domain.local/rpms/postgresql"

+                        "?#dc95586c4a443b26a9ce38abda1f9bed22f2f8c3",

+                 format="rpms",

+                 task_id=2433433 + index,

+                 state=koji.BUILD_STATES["COMPLETE"],

+                 nvr="postgresql-9.5.3-4.{0}".format(build_two_component_release),

+                 batch=2,

+                 module_id=3 + index * 3,

+                 tagged=True,

+                 tagged_in_final=True),

+             ComponentBuild(

+                 package="module-build-macros",

+                 scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"

+                        "module-build-macros-0.1-1.module_postgresql_1_2.src.rpm",

+                 format="rpms",

+                 task_id=47383993 + index,

+                 state=koji.BUILD_STATES["COMPLETE"],

+                 nvr="module-build-macros-01-1.{0}".format(build_two_component_release),

+                 batch=1,

+                 module_id=3 + index * 3)

+         ])

          db_session.commit()

  

          build_three = ModuleBuild(
@@ -304,34 +315,32 @@ 

          db_session.add(build_three)

          db_session.commit()

  

-         build_three_component_release = get_rpm_release(build_three)

- 

-         db_session.add(ComponentBuild(

-             package="rubygem-rails",

-             scmurl="git://pkgs.domain.local/rpms/rubygem-rails"

-                    "?#dd55886c4a443b26a9ce38abda1f9bed22f2f8c3",

-             format="rpms",

-             task_id=2433433 + index,

-             state=koji.BUILD_STATES["FAILED"],

-             nvr="postgresql-9.5.3-4.{0}".format(build_three_component_release),

-             batch=2,

-             module_id=4 + index * 3,

-         ))

- 

-         db_session.add(ComponentBuild(

-             package="module-build-macros",

-             scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"

-                    "module-build-macros-0.1-1.module_testmodule_1_2.src.rpm",

-             format="rpms",

-             task_id=47383993 + index,

-             state=koji.BUILD_STATES["COMPLETE"],

-             nvr="module-build-macros-01-1.{0}".format(build_three_component_release),

-             batch=1,

-             module_id=4 + index * 3,

-             tagged=True,

-             build_time_only=True,

-         ))

+         build_three_component_release = get_rpm_release(db_session, build_three)

  

+         db_session.add_all([

+             ComponentBuild(

+                 package="rubygem-rails",

+                 scmurl="git://pkgs.domain.local/rpms/rubygem-rails"

+                        "?#dd55886c4a443b26a9ce38abda1f9bed22f2f8c3",

+                 format="rpms",

+                 task_id=2433433 + index,

+                 state=koji.BUILD_STATES["FAILED"],

+                 nvr="postgresql-9.5.3-4.{0}".format(build_three_component_release),

+                 batch=2,

+                 module_id=4 + index * 3),

+             ComponentBuild(

+                 package="module-build-macros",

+                 scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"

+                        "module-build-macros-0.1-1.module_testmodule_1_2.src.rpm",

+                 format="rpms",

+                 task_id=47383993 + index,

+                 state=koji.BUILD_STATES["COMPLETE"],

+                 nvr="module-build-macros-01-1.{0}".format(build_three_component_release),

+                 batch=1,

+                 module_id=4 + index * 3,

+                 tagged=True,

+                 build_time_only=True)

+         ])

          db_session.commit()

  

  
@@ -374,9 +383,9 @@ 

      arch = db_session.query(module_build_service.models.ModuleArch).get(1)

      module_build.arches.append(arch)

  

-     build_one_component_release = get_rpm_release(module_build)

+     build_one_component_release = get_rpm_release(db_session, module_build)

  

-     module_build_comp_builds = [

+     db_session.add_all([

          module_build_service.models.ComponentBuild(

              module_id=module_build.id,

              package="perl-Tangerine",
@@ -434,308 +443,10 @@ 

              tagged=True,

              build_time_only=True,

          ),

-     ]

-     for c in module_build_comp_builds:

-         db_session.add(c)

+     ])

      db_session.commit()

  

  

- def reuse_component_init_data():

-     clean_database()

- 

-     mmd = load_mmd(read_staged_data("formatted_testmodule"))

- 

-     build_one = module_build_service.models.ModuleBuild(

-         name="testmodule",

-         stream="master",

-         version='20170109091357',

-         state=BUILD_STATES["ready"],

-         ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",

-         runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",

-         build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1",

-         context="78e4a6fd",

-         koji_tag="module-testmodule-master-20170109091357-78e4a6fd",

-         scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",

-         batch=3,

-         owner="Tom Brady",

-         time_submitted=datetime(2017, 2, 15, 16, 8, 18),

-         time_modified=datetime(2017, 2, 15, 16, 19, 35),

-         time_completed=datetime(2017, 2, 15, 16, 19, 35),

-         rebuild_strategy="changed-and-after",

-     )

- 

-     build_one_component_release = get_rpm_release(build_one)

- 

-     mmd.set_version(int(build_one.version))

-     xmd = mmd.get_xmd()

-     xmd["mbs"]["scmurl"] = build_one.scmurl

-     xmd["mbs"]["commit"] = "ff1ea79fc952143efeed1851aa0aa006559239ba"

-     mmd.set_xmd(xmd)

-     build_one.modulemd = mmd_to_str(mmd)

- 

-     db.session.add(build_one)

-     db.session.commit()

-     db.session.refresh(build_one)

- 

-     platform_br = module_build_service.models.ModuleBuild.get_by_id(db.session, 1)

-     build_one.buildrequires.append(platform_br)

- 

-     arch = module_build_service.models.ModuleArch.query.get(1)

-     build_one.arches.append(arch)

- 

-     build_one_comp_builds = [

-         module_build_service.models.ComponentBuild(

-             module_id=build_one.id,

-             package="perl-Tangerine",

-             scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"

-                    "?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",

-             format="rpms",

-             task_id=90276227,

-             state=koji.BUILD_STATES["COMPLETE"],

-             nvr="perl-Tangerine-0.23-1.{0}".format(build_one_component_release),

-             batch=2,

-             ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf",

-             tagged=True,

-             tagged_in_final=True,

-         ),

-         module_build_service.models.ComponentBuild(

-             module_id=build_one.id,

-             package="perl-List-Compare",

-             scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"

-                    "?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",

-             format="rpms",

-             task_id=90276228,

-             state=koji.BUILD_STATES["COMPLETE"],

-             nvr="perl-List-Compare-0.53-5.{0}".format(build_one_component_release),

-             batch=2,

-             ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",

-             tagged=True,

-             tagged_in_final=True,

-         ),

-         module_build_service.models.ComponentBuild(

-             module_id=build_one.id,

-             package="tangerine",

-             scmurl="https://src.fedoraproject.org/rpms/tangerine"

-                    "?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",

-             format="rpms",

-             task_id=90276315,

-             state=koji.BUILD_STATES["COMPLETE"],

-             nvr="tangerine-0.22-3.{0}".format(build_one_component_release),

-             batch=3,

-             ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c",

-             tagged=True,

-             tagged_in_final=True,

-         ),

-         module_build_service.models.ComponentBuild(

-             module_id=build_one.id,

-             package="module-build-macros",

-             scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"

-                    "macros-0.1-1.module_testmodule_master_20170109091357.src.rpm",

-             format="rpms",

-             task_id=90276181,

-             state=koji.BUILD_STATES["COMPLETE"],

-             nvr="module-build-macros-0.1-1.{0}".format(build_one_component_release),

-             batch=1,

-             tagged=True,

-             build_time_only=True,

-         ),

-     ]

-     for c in build_one_comp_builds:

-         db.session.add(c)

- 

-     # Commit component builds added to build_one

-     db.session.commit()

- 

-     build_two = module_build_service.models.ModuleBuild(

-         name="testmodule",

-         stream="master",

-         version='20170219191323',

-         state=BUILD_STATES["build"],

-         ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",

-         runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",

-         build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1",

-         context="c40c156c",

-         koji_tag="module-testmodule-master-20170219191323-c40c156c",

-         scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#55f4a0a",

-         batch=1,

-         owner="Tom Brady",

-         time_submitted=datetime(2017, 2, 19, 16, 8, 18),

-         time_modified=datetime(2017, 2, 19, 16, 8, 18),

-         rebuild_strategy="changed-and-after",

-     )

- 

-     build_two_component_release = get_rpm_release(build_two)

- 

-     mmd.set_version(int(build_one.version))

-     xmd = mmd.get_xmd()

-     xmd["mbs"]["scmurl"] = build_one.scmurl

-     xmd["mbs"]["commit"] = "55f4a0a2e6cc255c88712a905157ab39315b8fd8"

-     mmd.set_xmd(xmd)

-     build_two.modulemd = mmd_to_str(mmd)

- 

-     db.session.add(build_two)

-     db.session.commit()

-     db.session.refresh(build_two)

- 

-     build_two.arches.append(arch)

-     build_two.buildrequires.append(platform_br)

- 

-     build_two_comp_builds = [

-         module_build_service.models.ComponentBuild(

-             module_id=build_two.id,

-             package="perl-Tangerine",

-             scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"

-                    "?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",

-             format="rpms",

-             batch=2,

-             ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf",

-         ),

-         module_build_service.models.ComponentBuild(

-             module_id=build_two.id,

-             package="perl-List-Compare",

-             scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"

-                    "?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",

-             format="rpms",

-             batch=2,

-             ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",

-         ),

-         module_build_service.models.ComponentBuild(

-             module_id=build_two.id,

-             package="tangerine",

-             scmurl="https://src.fedoraproject.org/rpms/tangerine"

-                    "?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",

-             format="rpms",

-             batch=3,

-             ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c",

-         ),

-         module_build_service.models.ComponentBuild(

-             module_id=build_two.id,

-             package="module-build-macros",

-             scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"

-                    "macros-0.1-1.module_testmodule_master_20170219191323.src.rpm",

-             format="rpms",

-             task_id=90276186,

-             state=koji.BUILD_STATES["COMPLETE"],

-             nvr="module-build-macros-0.1-1.{0}".format(build_two_component_release),

-             batch=1,

-             tagged=True,

-             build_time_only=True,

-         ),

-     ]

-     for c in build_two_comp_builds:

-         db.session.add(c)

- 

-     # Commit component builds added to build_two

-     db.session.commit()

- 

- 

- def reuse_shared_userspace_init_data():

-     clean_database()

- 

-     with make_session(conf) as session:

-         # Create shared-userspace-570, state is COMPLETE, all components

-         # are properly built.

-         mmd = load_mmd(read_staged_data("shared-userspace-570"))

- 

-         module_build = module_build_service.models.ModuleBuild(

-             name=mmd.get_module_name(),

-             stream=mmd.get_stream_name(),

-             version=mmd.get_version(),

-             build_context="e046b867a400a06a3571f3c71142d497895fefbe",

-             runtime_context="50dd3eb5dde600d072e45d4120e1548ce66bc94a",

-             state=BUILD_STATES["ready"],

-             modulemd=mmd_to_str(mmd),

-             koji_tag="module-shared-userspace-f26-20170601141014-75f92abb",

-             scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453",

-             batch=16,

-             owner="Tom Brady",

-             time_submitted=datetime(2017, 2, 15, 16, 8, 18),

-             time_modified=datetime(2017, 2, 15, 16, 19, 35),

-             time_completed=datetime(2017, 2, 15, 16, 19, 35),

-             rebuild_strategy="changed-and-after",

-         )

- 

-         components = [

-             mmd.get_rpm_component(rpm)

-             for rpm in mmd.get_rpm_component_names()

-         ]

-         components.sort(key=lambda x: x.get_buildorder())

-         previous_buildorder = None

-         batch = 1

-         for pkg in components:

-             # Increment the batch number when buildorder increases.

-             if previous_buildorder != pkg.get_buildorder():

-                 previous_buildorder = pkg.get_buildorder()

-                 batch += 1

- 

-             pkgref = mmd.get_xmd()["mbs"]["rpms"][pkg.get_name()]["ref"]

-             full_url = pkg.get_repository() + "?#" + pkgref

- 

-             module_build.component_builds.append(

-                 module_build_service.models.ComponentBuild(

-                     package=pkg.get_name(),

-                     format="rpms",

-                     scmurl=full_url,

-                     batch=batch,

-                     ref=pkgref,

-                     state=1,

-                     tagged=True,

-                     tagged_in_final=True,

-                 )

-             )

- 

-         session.add(module_build)

-         session.commit()

- 

-         # Create shared-userspace-577, state is WAIT, no component built

-         mmd2 = load_mmd(read_staged_data("shared-userspace-577"))

- 

-         module_build = module_build_service.models.ModuleBuild(

-             name=mmd2.get_module_name(),

-             stream=mmd2.get_stream_name(),

-             version=mmd2.get_version(),

-             build_context="e046b867a400a06a3571f3c71142d497895fefbe",

-             runtime_context="50dd3eb5dde600d072e45d4120e1548ce66bc94a",

-             state=BUILD_STATES["done"],

-             modulemd=mmd_to_str(mmd2),

-             koji_tag="module-shared-userspace-f26-20170605091544-75f92abb",

-             scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453",

-             batch=0,

-             owner="Tom Brady",

-             time_submitted=datetime(2017, 2, 15, 16, 8, 18),

-             time_modified=datetime(2017, 2, 15, 16, 19, 35),

-             time_completed=datetime(2017, 2, 15, 16, 19, 35),

-             rebuild_strategy="changed-and-after",

-         )

- 

-         components2 = [

-             mmd2.get_rpm_component(rpm)

-             for rpm in mmd2.get_rpm_component_names()

-         ]

-         # Store components to database in different order than for 570 to

-         # reproduce the reusing issue.

-         components2.sort(key=lambda x: len(x.get_name()))

-         components2.sort(key=lambda x: x.get_buildorder())

-         previous_buildorder = None

-         batch = 1

-         for pkg in components2:

-             # Increment the batch number when buildorder increases.

-             if previous_buildorder != pkg.get_buildorder():

-                 previous_buildorder = pkg.get_buildorder()

-                 batch += 1

- 

-             pkgref = mmd2.get_xmd()["mbs"]["rpms"][pkg.get_name()]["ref"]

-             full_url = pkg.get_repository() + "?#" + pkgref

- 

-             module_build.component_builds.append(

-                 module_build_service.models.ComponentBuild(

-                     package=pkg.get_name(), format="rpms", scmurl=full_url, batch=batch, ref=pkgref)

-             )

- 

-         session.add(module_build)

-         session.commit()

- 

- 

  def make_module(

      db_session,

      nsvc,
@@ -881,3 +592,25 @@ 

              db_session.commit()

  

      return module_build

+ 

+ 

+ def module_build_from_modulemd(yaml):

+     """

+     Create and return a ModuleBuild object. It is not written to the

+     database; add and commit it yourself if necessary.

+     """

+     mmd = load_mmd(yaml)

+     build = ModuleBuild()

+     build.name = mmd.get_module_name()

+     build.stream = mmd.get_stream_name()

+     build.version = mmd.get_version()

+     build.state = BUILD_STATES["ready"]

+     build.modulemd = yaml

+     build.koji_tag = None

+     build.batch = 0

+     build.owner = "some_other_user"

+     build.time_submitted = datetime(2016, 9, 3, 12, 28, 33)

+     build.time_modified = datetime(2016, 9, 3, 12, 28, 40)

+     build.time_completed = None

+     build.rebuild_strategy = "changed-and-after"

+     return build
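
A hedged usage sketch of this helper from a test (the staged-data name exists in this test suite; the session handling shown is illustrative):

    from tests import read_staged_data, module_build_from_modulemd

    yaml = read_staged_data("formatted_testmodule")
    build = module_build_from_modulemd(yaml)
    # the helper does not persist anything; the caller owns the session
    db_session.add(build)
    db_session.commit()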

file modified
+325 -6
@@ -18,14 +18,19 @@ 

  # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE

  # SOFTWARE.

  #

- import os

  

+ import koji

+ import os

  import pytest

  

+ from datetime import datetime

+ 

+ import module_build_service

+ 

  from module_build_service import conf

- from module_build_service.models import make_session

- from module_build_service.utils.general import mmd_to_str, load_mmd

- from tests import read_staged_data

+ from module_build_service.models import make_db_session, BUILD_STATES

+ from module_build_service.utils.general import mmd_to_str, load_mmd, get_rpm_release

+ from tests import clean_database, read_staged_data, module_build_from_modulemd

  

  BASE_DIR = os.path.dirname(__file__)

  STAGED_DATA_DIR = os.path.join(BASE_DIR, "staged_data")
@@ -61,7 +66,321 @@ 

      return PLATFORM_MODULEMD

  

  

- @pytest.fixture(scope="function")

+ @pytest.fixture()

  def db_session():

-     with make_session(conf) as db_session:

+     with make_db_session(conf) as db_session:

          yield db_session
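
For reference, a minimal sketch of how a test consumes this fixture (the test body is hypothetical):

    import module_build_service.models as models

    def test_lists_module_builds(db_session):
        # pytest injects the session yielded by the db_session fixture above
        builds = db_session.query(models.ModuleBuild).all()
        assert isinstance(builds, list)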

+ 

+ 

+ @pytest.fixture()

+ def model_tests_init_data(db_session):

+     """Initialize data for model tests

+ 

+     This is refactored from tests/test_models/__init__.py, where it could be

These comments are great, but they're probably better left in the commit message or PR description. I say this because they are only relevant when reviewing the change.

+     called directly inside setup_method.

+ 

+     The reason to convert it to this fixture is to reuse the ``db_session``

+     fixture rather than create a new session. That also benefits the whole

+     test suite by reducing the number of SQLAlchemy session objects.

+     """

+     clean_database()

+ 

+     model_test_data_dir = os.path.join(os.path.dirname(__file__), "test_models", "data")

+ 

+     for filename in os.listdir(model_test_data_dir):

+         with open(os.path.join(model_test_data_dir, filename), "r") as f:

+             yaml = f.read()

+         build = module_build_from_modulemd(yaml)

+         db_session.add(build)

+ 

+     db_session.commit()

+ 

+ 

+ @pytest.fixture()

+ def reuse_component_init_data(db_session):

+     clean_database()

+ 

+     mmd = load_mmd(read_staged_data("formatted_testmodule"))

+ 

+     build_one = module_build_service.models.ModuleBuild(

+         name="testmodule",

+         stream="master",

+         version='20170109091357',

+         state=BUILD_STATES["ready"],

+         ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",

+         runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",

+         build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1",

+         context="78e4a6fd",

+         koji_tag="module-testmodule-master-20170109091357-78e4a6fd",

+         scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",

+         batch=3,

+         owner="Tom Brady",

+         time_submitted=datetime(2017, 2, 15, 16, 8, 18),

+         time_modified=datetime(2017, 2, 15, 16, 19, 35),

+         time_completed=datetime(2017, 2, 15, 16, 19, 35),

+         rebuild_strategy="changed-and-after",

+     )

+ 

+     build_one_component_release = get_rpm_release(db_session, build_one)

+ 

+     mmd.set_version(int(build_one.version))

+     xmd = mmd.get_xmd()

+     xmd["mbs"]["scmurl"] = build_one.scmurl

+     xmd["mbs"]["commit"] = "ff1ea79fc952143efeed1851aa0aa006559239ba"

+     mmd.set_xmd(xmd)

+     build_one.modulemd = mmd_to_str(mmd)

+ 

+     db_session.add(build_one)

+     db_session.commit()

+     db_session.refresh(build_one)

+ 

+     platform_br = module_build_service.models.ModuleBuild.get_by_id(db_session, 1)

+     build_one.buildrequires.append(platform_br)

+ 

+     arch = db_session.query(module_build_service.models.ModuleArch).get(1)

+     build_one.arches.append(arch)

+ 

+     db_session.add_all([

+         module_build_service.models.ComponentBuild(

+             module_id=build_one.id,

+             package="perl-Tangerine",

+             scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"

+                    "?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",

+             format="rpms",

+             task_id=90276227,

+             state=koji.BUILD_STATES["COMPLETE"],

+             nvr="perl-Tangerine-0.23-1.{0}".format(build_one_component_release),

+             batch=2,

+             ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf",

+             tagged=True,

+             tagged_in_final=True,

+         ),

+         module_build_service.models.ComponentBuild(

+             module_id=build_one.id,

+             package="perl-List-Compare",

+             scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"

+                    "?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",

+             format="rpms",

+             task_id=90276228,

+             state=koji.BUILD_STATES["COMPLETE"],

+             nvr="perl-List-Compare-0.53-5.{0}".format(build_one_component_release),

+             batch=2,

+             ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",

+             tagged=True,

+             tagged_in_final=True,

+         ),

+         module_build_service.models.ComponentBuild(

+             module_id=build_one.id,

+             package="tangerine",

+             scmurl="https://src.fedoraproject.org/rpms/tangerine"

+                    "?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",

+             format="rpms",

+             task_id=90276315,

+             state=koji.BUILD_STATES["COMPLETE"],

+             nvr="tangerine-0.22-3.{0}".format(build_one_component_release),

+             batch=3,

+             ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c",

+             tagged=True,

+             tagged_in_final=True,

+         ),

+         module_build_service.models.ComponentBuild(

+             module_id=build_one.id,

+             package="module-build-macros",

+             scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"

+                    "macros-0.1-1.module_testmodule_master_20170109091357.src.rpm",

+             format="rpms",

+             task_id=90276181,

+             state=koji.BUILD_STATES["COMPLETE"],

+             nvr="module-build-macros-0.1-1.{0}".format(build_one_component_release),

+             batch=1,

+             tagged=True,

+             build_time_only=True,

+         ),

+     ])

+     # Commit component builds added to build_one

+     db_session.commit()

+ 

+     build_two = module_build_service.models.ModuleBuild(

+         name="testmodule",

+         stream="master",

+         version='20170219191323',

+         state=BUILD_STATES["build"],

+         ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",

+         runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",

+         build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1",

+         context="c40c156c",

+         koji_tag="module-testmodule-master-20170219191323-c40c156c",

+         scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#55f4a0a",

+         batch=1,

+         owner="Tom Brady",

+         time_submitted=datetime(2017, 2, 19, 16, 8, 18),

+         time_modified=datetime(2017, 2, 19, 16, 8, 18),

+         rebuild_strategy="changed-and-after",

+     )

+ 

+     build_two_component_release = get_rpm_release(db_session, build_two)

+ 

+     mmd.set_version(int(build_one.version))

+     xmd = mmd.get_xmd()

+     xmd["mbs"]["scmurl"] = build_one.scmurl

+     xmd["mbs"]["commit"] = "55f4a0a2e6cc255c88712a905157ab39315b8fd8"

+     mmd.set_xmd(xmd)

+     build_two.modulemd = mmd_to_str(mmd)

+ 

+     db_session.add(build_two)

+     db_session.commit()

+     db_session.refresh(build_two)

+ 

+     build_two.arches.append(arch)

+     build_two.buildrequires.append(platform_br)

+ 

+     db_session.add_all([

+         module_build_service.models.ComponentBuild(

+             module_id=build_two.id,

+             package="perl-Tangerine",

+             scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"

+                    "?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",

+             format="rpms",

+             batch=2,

+             ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf",

+         ),

+         module_build_service.models.ComponentBuild(

+             module_id=build_two.id,

+             package="perl-List-Compare",

+             scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"

+                    "?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",

+             format="rpms",

+             batch=2,

+             ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",

+         ),

+         module_build_service.models.ComponentBuild(

+             module_id=build_two.id,

+             package="tangerine",

+             scmurl="https://src.fedoraproject.org/rpms/tangerine"

+                    "?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",

+             format="rpms",

+             batch=3,

+             ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c",

+         ),

+         module_build_service.models.ComponentBuild(

+             module_id=build_two.id,

+             package="module-build-macros",

+             scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"

+                    "macros-0.1-1.module_testmodule_master_20170219191323.src.rpm",

+             format="rpms",

+             task_id=90276186,

+             state=koji.BUILD_STATES["COMPLETE"],

+             nvr="module-build-macros-0.1-1.{0}".format(build_two_component_release),

+             batch=1,

+             tagged=True,

+             build_time_only=True,

+         ),

+     ])

+     db_session.commit()

+ 

+ 

+ @pytest.fixture()

+ def reuse_shared_userspace_init_data(db_session):

+     clean_database()

+ 

+     # Create shared-userspace-570, state is COMPLETE, all components

+     # are properly built.

+     mmd = load_mmd(read_staged_data("shared-userspace-570"))

+ 

+     module_build = module_build_service.models.ModuleBuild(

+         name=mmd.get_module_name(),

+         stream=mmd.get_stream_name(),

+         version=mmd.get_version(),

+         build_context="e046b867a400a06a3571f3c71142d497895fefbe",

+         runtime_context="50dd3eb5dde600d072e45d4120e1548ce66bc94a",

+         state=BUILD_STATES["ready"],

+         modulemd=mmd_to_str(mmd),

+         koji_tag="module-shared-userspace-f26-20170601141014-75f92abb",

+         scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453",

+         batch=16,

+         owner="Tom Brady",

+         time_submitted=datetime(2017, 2, 15, 16, 8, 18),

+         time_modified=datetime(2017, 2, 15, 16, 19, 35),

+         time_completed=datetime(2017, 2, 15, 16, 19, 35),

+         rebuild_strategy="changed-and-after",

+     )

+ 

+     components = [

+         mmd.get_rpm_component(rpm)

+         for rpm in mmd.get_rpm_component_names()

+     ]

+     components.sort(key=lambda x: x.get_buildorder())

+     previous_buildorder = None

+     batch = 1

+     for pkg in components:

+         # Increment the batch number when buildorder increases.

+         if previous_buildorder != pkg.get_buildorder():

+             previous_buildorder = pkg.get_buildorder()

+             batch += 1

+ 

+         pkgref = mmd.get_xmd()["mbs"]["rpms"][pkg.get_name()]["ref"]

+         full_url = pkg.get_repository() + "?#" + pkgref

+ 

+         module_build.component_builds.append(

+             module_build_service.models.ComponentBuild(

+                 package=pkg.get_name(),

+                 format="rpms",

+                 scmurl=full_url,

+                 batch=batch,

+                 ref=pkgref,

+                 state=1,

+                 tagged=True,

+                 tagged_in_final=True,

+             )

+         )

+ 

+     db_session.add(module_build)

+     db_session.commit()

+ 

+     # Create shared-userspace-577, state is WAIT, no component built

+     mmd2 = load_mmd(read_staged_data("shared-userspace-577"))

+ 

+     module_build = module_build_service.models.ModuleBuild(

+         name=mmd2.get_module_name(),

+         stream=mmd2.get_stream_name(),

+         version=mmd2.get_version(),

+         build_context="e046b867a400a06a3571f3c71142d497895fefbe",

+         runtime_context="50dd3eb5dde600d072e45d4120e1548ce66bc94a",

+         state=BUILD_STATES["done"],

+         modulemd=mmd_to_str(mmd2),

+         koji_tag="module-shared-userspace-f26-20170605091544-75f92abb",

+         scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453",

+         batch=0,

+         owner="Tom Brady",

+         time_submitted=datetime(2017, 2, 15, 16, 8, 18),

+         time_modified=datetime(2017, 2, 15, 16, 19, 35),

+         time_completed=datetime(2017, 2, 15, 16, 19, 35),

+         rebuild_strategy="changed-and-after",

+     )

+ 

+     components2 = [

+         mmd2.get_rpm_component(rpm)

+         for rpm in mmd2.get_rpm_component_names()

+     ]

+     # Store components to database in different order than for 570 to

+     # reproduce the reusing issue.

+     components2.sort(key=lambda x: len(x.get_name()))

+     components2.sort(key=lambda x: x.get_buildorder())

+     previous_buildorder = None

+     batch = 1

+     for pkg in components2:

+         # Increment the batch number when buildorder increases.

+         if previous_buildorder != pkg.get_buildorder():

+             previous_buildorder = pkg.get_buildorder()

+             batch += 1

+ 

+         pkgref = mmd2.get_xmd()["mbs"]["rpms"][pkg.get_name()]["ref"]

+         full_url = pkg.get_repository() + "?#" + pkgref

+ 

+         module_build.component_builds.append(

+             module_build_service.models.ComponentBuild(

+                 package=pkg.get_name(), format="rpms", scmurl=full_url, batch=batch, ref=pkgref)

+         )

+ 

+     db_session.add(module_build)

+     db_session.commit()

file modified
+186 -163
@@ -35,7 +35,7 @@ 

  import module_build_service.scheduler.handlers.repos

  import module_build_service.utils

  from module_build_service.errors import Forbidden

- from module_build_service import db, models, conf, build_logs

+ from module_build_service import models, conf, build_logs

  from module_build_service.scheduler import make_simple_stop_condition

  

  from mock import patch, PropertyMock, Mock
@@ -50,7 +50,7 @@ 

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  from module_build_service.messaging import MBSModule

  from tests import (

-     app, reuse_component_init_data, clean_database, read_staged_data, staged_data_filename

+     app, clean_database, read_staged_data, staged_data_filename

  )

  

  base_dir = dirname(dirname(__file__))
@@ -119,7 +119,8 @@ 

      on_get_task_info_cb = None

  

      @module_build_service.utils.validate_koji_tag("tag_name")

-     def __init__(self, owner, module, config, tag_name, components):

+     def __init__(self, db_session, owner, module, config, tag_name, components):

+         self.db_session = db_session

          self.module_str = module

          self.tag_name = tag_name

          self.config = config
@@ -227,7 +228,9 @@ 

              for nvr in artifacts:

                  # tag_artifacts received a list of NVRs, but the tag message expects the

                  # component name

-                 artifact = models.ComponentBuild.query.filter_by(nvr=nvr).first().package

+                 from sqlalchemy.orm import load_only

+                 artifact = self.db_session.query(models.ComponentBuild).filter_by(

+                     nvr=nvr).options(load_only("package")).first().package
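
For reviewers unfamiliar with it: load_only (from sqlalchemy.orm) narrows the emitted SELECT to the listed columns, which is all this lookup needs. The inline import works, though it could be hoisted to module level; a sketch of the hoisted form (same query, placement illustrative):

    from sqlalchemy.orm import load_only

    artifact = (
        self.db_session.query(models.ComponentBuild)
        .filter_by(nvr=nvr)
        .options(load_only("package"))
        .first()
        .package
    )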

                  self._send_tag(artifact, nvr, dest_tag=dest_tag)

  

      @property
@@ -304,7 +307,8 @@ 

      def recover_orphaned_artifact(self, component_build):

          msgs = []

          if self.INSTANT_COMPLETE:

-             disttag = module_build_service.utils.get_rpm_release(component_build.module_build)

+             disttag = module_build_service.utils.get_rpm_release(

+                 self.db_session, component_build.module_build)

              # We don't know the version or release, so just use a random one here

              nvr = "{0}-1.0-1.{1}".format(component_build.package, disttag)

              component_build.state = koji.BUILD_STATES["COMPLETE"]
@@ -418,22 +422,27 @@ 

  

          FakeModuleBuilder.on_get_task_info_cb = on_get_task_info_cb

  

+         self.p_check_gating = patch(

+             "module_build_service.utils.greenwave.Greenwave.check_gating",

+             return_value=True)

+         self.mock_check_gating = self.p_check_gating.start()

+ 

      def teardown_method(self, test_method):

+         self.p_check_gating.stop()

          FakeModuleBuilder.reset()

          cleanup_moksha()

          for i in range(20):

              try:

-                 os.remove(build_logs.path(i))

+                 with models.make_db_session(conf) as db_session:

+                     os.remove(build_logs.path(db_session, i))

              except Exception:

                  pass
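
Moving the Greenwave gating mock into setup_method/teardown_method lets individual tests re-point one shared mock (see test_submit_build_no_components below) instead of stacking @patch decorators on each test. A minimal sketch of the start/stop idiom, with an illustrative patch target:

    from mock import patch

    class TestExample:
        def setup_method(self, test_method):
            # start the patcher once per test; keep a handle for reconfiguration
            self.p_gating = patch("pkg.Greenwave.check_gating", return_value=True)
            self.mock_gating = self.p_gating.start()

        def teardown_method(self, test_method):
            # always stop the patcher so the patch does not leak between tests
            self.p_gating.stop()

        def test_gating_denied(self):
            self.mock_gating.return_value = False  # per-test override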

  

      @pytest.mark.parametrize("mmd_version", [1, 2])

-     @patch("module_build_service.utils.greenwave.Greenwave.check_gating", return_value=True)

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build(

-         self, mocked_scm, mocked_get_user, mocked_greenwave, conf_system, dbg,

-         hmsc, mmd_version, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, mmd_version, db_session

      ):

          """

          Tests the build of testmodule.yaml using FakeModuleBuilder which
@@ -458,9 +467,10 @@ 

          module_build_id = data["id"]

  

          # Check that components are tagged after the batch is built.

-         tag_groups = []

-         tag_groups.append(set(["perl-Tangerine-1-1", "perl-List-Compare-1-1"]))

-         tag_groups.append(set(["tangerine-1-1"]))

+         tag_groups = [

+             set(["perl-Tangerine-1-1", "perl-List-Compare-1-1"]),

+             set(["tangerine-1-1"]),

+         ]

  

          def on_finalize_cb(cls, succeeded):

              assert succeeded is True
@@ -473,10 +483,11 @@ 

  

          # Check that the components are added to buildroot after the batch

          # is built.

-         buildroot_groups = []

-         buildroot_groups.append(set(["module-build-macros-1-1"]))

-         buildroot_groups.append(set(["perl-Tangerine-1-1", "perl-List-Compare-1-1"]))

-         buildroot_groups.append(set(["tangerine-1-1"]))

+         buildroot_groups = [

+             set(["module-build-macros-1-1"]),

+             set(["perl-Tangerine-1-1", "perl-List-Compare-1-1"]),

+             set(["tangerine-1-1"]),

+         ]

  

          def on_buildroot_add_artifacts_cb(cls, artifacts, install):

              assert buildroot_groups.pop(0) == set(artifacts)
@@ -487,7 +498,8 @@ 

  

          # All components should be built and the module itself should be in "done"

          # or "ready" state.

-         for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

+         for build in db_session.query(models.ComponentBuild).filter_by(

+                 module_id=module_build_id).all():

              assert build.state == koji.BUILD_STATES["COMPLETE"]

              assert build.module_build.state in [

                  models.BUILD_STATES["done"],
@@ -497,7 +509,7 @@ 

          # All components have to be tagged, so tag_groups and buildroot_groups are empty...

          assert tag_groups == []

          assert buildroot_groups == []

-         module_build = models.ModuleBuild.query.get(module_build_id)

+         module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)

          assert module_build.module_builds_trace[0].state == models.BUILD_STATES["init"]

          assert module_build.module_builds_trace[1].state == models.BUILD_STATES["wait"]

          assert module_build.module_builds_trace[2].state == models.BUILD_STATES["build"]
@@ -506,17 +518,15 @@ 

          assert len(module_build.module_builds_trace) == 5

  

      @pytest.mark.parametrize("gating_result", (True, False))

-     @patch("module_build_service.utils.greenwave.Greenwave.check_gating")

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_no_components(

-         self, mocked_scm, mocked_get_user, mocked_greenwave, conf_system, dbg,

-         hmsc, gating_result, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, gating_result, db_session

      ):

          """

          Tests the build of a module with no components

          """

-         mocked_greenwave.return_value = gating_result

+         self.mock_check_gating.return_value = gating_result

          FakeSCM(

              mocked_scm,

              "python3",
@@ -537,7 +547,7 @@ 

  

          self.run_scheduler(db_session)

  

-         module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()

+         module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)

          # Make sure no component builds were registered

          assert len(module_build.component_builds) == 0

          # Make sure the build is done
@@ -626,11 +636,13 @@ 

                      data={"yaml": yaml_file},

                  )

              data = json.loads(rv.data)

-             assert data["id"] == 2

+             module_build_id = data["id"]

+             assert module_build_id == 2

  

          self.run_scheduler(db_session)

  

-         assert models.ModuleBuild.query.first().state == models.BUILD_STATES["ready"]

+         module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)

+         assert module_build.state == models.BUILD_STATES["ready"]

  

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")
@@ -684,7 +696,7 @@ 

          # Because we did not finish a single component build and canceled the

          # module build, all components and even the module itself should be in

          # the failed state with state_reason set to the cancellation message.

-         for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

+         for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:

              assert build.state == koji.BUILD_STATES["FAILED"]

              assert build.state_reason == "Canceled by Homer J. Simpson."

              assert build.module_build.state == models.BUILD_STATES["failed"]
@@ -723,7 +735,7 @@ 

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.

-         for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

+         for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:

              assert build.state == koji.BUILD_STATES["COMPLETE"]

              assert build.module_build.state in [

                  models.BUILD_STATES["done"],
@@ -778,7 +790,7 @@ 

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.

-         for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

+         for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:

              assert build.state == koji.BUILD_STATES["COMPLETE"]

              # When this fails, it can mean that num_concurrent_builds

              # threshold has been met.
@@ -901,7 +913,7 @@ 

  

          self.run_scheduler(db_session)

  

-         for c in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

+         for c in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:

              # perl-Tangerine is expected to fail as configured in on_build_cb.

              if c.package == "perl-Tangerine":

                  assert c.state == koji.BUILD_STATES["FAILED"]
@@ -961,7 +973,7 @@ 

  

          self.run_scheduler(db_session)

  

-         for c in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

+         for c in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:

              # perl-Tangerine is expected to fail as configured in on_build_cb.

              if c.package == "module-build-macros":

                  assert c.state == koji.BUILD_STATES["COMPLETE"]
@@ -980,6 +992,7 @@ 

              # there were failed components in batch 2.

              assert c.module_build.batch == 2

  

+     @pytest.mark.usefixtures("reuse_component_init_data")

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_reuse_all(
@@ -989,8 +1002,6 @@ 

          Tests that we do not try building module-build-macros when reusing all

          components in a module build.

          """

-         reuse_component_init_data()

- 

          def on_build_cb(cls, artifact_name, source):

              raise ValueError("All components should be reused, not build.")

  
@@ -1024,7 +1035,9 @@ 

  

          FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

  

-         self.run_scheduler(db_session, msgs=[MBSModule("local module build", 3, 1)])

+         # Create a dedicated database session for the scheduler to avoid a hang

+         with models.make_db_session(conf) as scheduler_db_session:

+             self.run_scheduler(scheduler_db_session, msgs=[MBSModule("local module build", 3, 1)])

  

          reused_component_ids = {

              "module-build-macros": None,
@@ -1035,7 +1048,7 @@ 

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.

-         for build in models.ComponentBuild.query.filter_by(module_id=3).all():

+         for build in models.ModuleBuild.get_by_id(db_session, 3).component_builds:

              assert build.state == koji.BUILD_STATES["COMPLETE"]

              assert build.module_build.state in [

                  models.BUILD_STATES["done"],
@@ -1043,6 +1056,7 @@ 

              ]

              assert build.reused_component_id == reused_component_ids[build.package]

  

+     @pytest.mark.usefixtures("reuse_component_init_data")

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_reuse_all_without_build_macros(
@@ -1052,17 +1066,21 @@ 

          Tests that we can reuse components even when the reused module does

          not have a module-build-macros component.

          """

-         reuse_component_init_data()

- 

-         db_session.query(models.ComponentBuild).filter_by(package="module-build-macros").delete()

-         assert (

-             0 == db_session.query(models.ComponentBuild)

-                            .filter_by(package="module-build-macros")

-                            .count()

-         )

+         # First, remove all existing module-build-macros component builds

  

+         macros_cb_query = db_session.query(models.ComponentBuild).filter_by(

+             package="module-build-macros")

+         db_session.query(models.ComponentBuildTrace).filter(

+             models.ComponentBuildTrace.component_id.in_(

+                 [cb.id for cb in macros_cb_query.all()]

+             )

+         ).delete(synchronize_session=False)

+         macros_cb_query.delete(synchronize_session=False)

          db_session.commit()

  

+         # Ensure the removal succeeded

+         assert 0 == macros_cb_query.count()

+ 

          def on_build_cb(cls, artifact_name, source):

              raise ValueError("All components should be reused, not build.")

  
@@ -1096,11 +1114,12 @@ 

  

          FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

  

-         self.run_scheduler(db_session, msgs=[MBSModule("local module build", 3, 1)])

+         with models.make_db_session(conf) as scheduler_db_session:

+             self.run_scheduler(scheduler_db_session, msgs=[MBSModule("local module build", 3, 1)])

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.

-         for build in db_session.query(models.ComponentBuild).filter_by(module_id=3).all():

+         for build in models.ModuleBuild.get_by_id(db_session, 3).component_builds:

              assert build.state == koji.BUILD_STATES["COMPLETE"]

              assert build.module_build.state in [

                  models.BUILD_STATES["done"],
@@ -1137,6 +1156,10 @@ 

          build_one.time_submitted = submitted_time

          build_one.time_modified = now

          build_one.rebuild_strategy = "changed-and-after"

+ 

+         db_session.add(build_one)

+         db_session.commit()

+ 

          # It went from init, to wait, to build, and then failed

          mbt_one = models.ModuleBuildTrace(

              state_time=submitted_time, state=models.BUILD_STATES["init"]
@@ -1152,55 +1175,53 @@ 

          build_one.module_builds_trace.append(mbt_two)

          build_one.module_builds_trace.append(mbt_three)

          build_one.module_builds_trace.append(mbt_four)

+ 

+         db_session.commit()

+ 

          # Successful component

-         component_one = models.ComponentBuild()

-         component_one.package = "perl-Tangerine"

-         component_one.format = "rpms"

-         component_one.scmurl = "https://src.stg.fedoraproject.org/rpms/perl-Tangerine.git?#master"

-         component_one.state = koji.BUILD_STATES["COMPLETE"]

-         component_one.nvr = "perl-Tangerine-0:0.22-2.module+0+d027b723"

-         component_one.batch = 2

-         component_one.module_id = 2

-         component_one.ref = "7e96446223f1ad84a26c7cf23d6591cd9f6326c6"

-         component_one.tagged = True

-         component_one.tagged_in_final = True

-         # Failed component

-         component_two = models.ComponentBuild()

-         component_two.package = "perl-List-Compare"

-         component_two.format = "rpms"

-         component_two.scmurl = \

-             "https://src.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master"

-         component_two.state = koji.BUILD_STATES["FAILED"]

-         component_two.batch = 2

-         component_two.module_id = 2

-         # Component that isn't started yet

-         component_three = models.ComponentBuild()

-         component_three.package = "tangerine"

-         component_three.format = "rpms"

-         component_three.scmurl = "https://src.stg.fedoraproject.org/rpms/tangerine.git?#master"

-         component_three.batch = 3

-         component_three.module_id = 2

-         # module-build-macros

-         component_four = models.ComponentBuild()

-         component_four.package = "module-build-macros"

-         component_four.format = "rpms"

-         component_four.state = koji.BUILD_STATES["COMPLETE"]

-         component_four.scmurl = (

-             "/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1."

-             "module_testmodule_master_20170109091357.src.rpm"

-         )

-         component_four.batch = 1

-         component_four.module_id = 2

-         component_four.tagged = True

-         component_four.build_time_only = True

- 

-         db.session.add(build_one)

-         db.session.add(component_one)

-         db.session.add(component_two)

-         db.session.add(component_three)

-         db.session.add(component_four)

-         db.session.commit()

-         db.session.expire_all()

+         db_session.add_all([

+             models.ComponentBuild(

+                 module_id=build_one.id,

+                 package="perl-Tangerine",

+                 format="rpms",

+                 scmurl="https://src.stg.fedoraproject.org/rpms/perl-Tangerine.git?#master",

+                 state=koji.BUILD_STATES["COMPLETE"],

+                 nvr="perl-Tangerine-0:0.22-2.module+0+d027b723",

+                 batch=2,

+                 ref="7e96446223f1ad84a26c7cf23d6591cd9f6326c6",

+                 tagged=True,

+                 tagged_in_final=True),

+             # Failed component

+             models.ComponentBuild(

+                 module_id=build_one.id,

+                 package="perl-List-Compare",

+                 format="rpms",

+                 scmurl="https://src.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master",

+                 state=koji.BUILD_STATES["FAILED"],

+                 batch=2),

+             # Component that isn't started yet

+             models.ComponentBuild(

+                 module_id=build_one.id,

+                 package="tangerine",

+                 format="rpms",

+                 scmurl="https://src.stg.fedoraproject.org/rpms/tangerine.git?#master",

+                 batch=3),

+             # module-build-macros

+             models.ComponentBuild(

+                 module_id=build_one.id,

+                 package="module-build-macros",

+                 format="rpms",

+                 state=koji.BUILD_STATES["COMPLETE"],

+                 scmurl=(

+                     "/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1."

+                     "module_testmodule_master_20170109091357.src.rpm"

+                 ),

+                 batch=1,

+                 tagged=True,

+                 build_time_only=True),

+         ])

+         db_session.commit()

+         db_session.expire_all()

  

          FakeSCM(

              mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")
@@ -1216,28 +1237,27 @@ 

  

          data = json.loads(rv.data)

          module_build_id = data["id"]

-         module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()

+         module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)

          components = (

-             models.ComponentBuild

-                   .query

-                   .filter_by(module_id=module_build_id, batch=2)

-                   .order_by(models.ComponentBuild.id)

-                   .all()

+             db_session.query(models.ComponentBuild)

+             .filter_by(module_id=module_build_id, batch=2)

+             .order_by(models.ComponentBuild.id)

+             .all()

          )

          # Make sure the build went from failed to wait

          assert module_build.state == models.BUILD_STATES["wait"]

          assert module_build.state_reason == "Resubmitted by Homer J. Simpson"

          # Make sure the state was reset on the failed component

          assert components[1].state is None

-         db.session.expire_all()

+         db_session.expire_all()

  

          # Run the backend

-         self.run_scheduler(db_session)

+         with models.make_db_session(conf) as scheduler_db_session:

+             self.run_scheduler(scheduler_db_session)

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.

-         for build in models.ComponentBuild.query.filter_by(

-                 module_id=module_build_id).all():

+         for build in module_build.component_builds:

              assert build.state == koji.BUILD_STATES["COMPLETE"]

              assert build.module_build.state in [

                  models.BUILD_STATES["done"],
@@ -1256,6 +1276,7 @@ 

          FakeModuleBuilder.INSTANT_COMPLETE = True

          now = datetime.utcnow()

          submitted_time = now - timedelta(minutes=3)

+ 

          # Create a module in the failed state

          build_one = models.ModuleBuild()

          build_one.name = "testmodule"
@@ -1276,6 +1297,9 @@ 

          build_one.time_submitted = submitted_time

          build_one.time_modified = now

          build_one.rebuild_strategy = "changed-and-after"

+         db_session.add(build_one)

+         db_session.commit()

+ 

          # It went from init, to wait, to build, and then failed

          mbt_one = models.ModuleBuildTrace(

              state_time=submitted_time, state=models.BUILD_STATES["init"])
@@ -1288,46 +1312,45 @@ 

          build_one.module_builds_trace.append(mbt_two)

          build_one.module_builds_trace.append(mbt_three)

          build_one.module_builds_trace.append(mbt_four)

+ 

+         db_session.commit()

+         db_session.expire_all()

+ 

          # Components that haven't started yet

-         component_one = models.ComponentBuild()

-         component_one.package = "perl-Tangerine"

-         component_one.format = "rpms"

-         component_one.scmurl = "https://src.stg.fedoraproject.org/rpms/perl-Tangerine.git?#master"

-         component_one.batch = 2

-         component_one.module_id = 2

-         component_two = models.ComponentBuild()

-         component_two.package = "perl-List-Compare"

-         component_two.format = "rpms"

-         component_two.scmurl = \

-             "https://src.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master"

-         component_two.batch = 2

-         component_two.module_id = 2

-         component_three = models.ComponentBuild()

-         component_three.package = "tangerine"

-         component_three.format = "rpms"

-         component_three.scmurl = "https://src.stg.fedoraproject.org/rpms/tangerine.git?#master"

-         component_three.batch = 3

-         component_three.module_id = 2

-         # Failed module-build-macros

-         component_four = models.ComponentBuild()

-         component_four.package = "module-build-macros"

-         component_four.format = "rpms"

-         component_four.state = koji.BUILD_STATES["FAILED"]

-         component_four.scmurl = (

-             "/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1."

-             "module_testmodule_master_20180205135154.src.rpm"

-         )

-         component_four.batch = 1

-         component_four.module_id = 2

-         component_four.build_time_only = True

- 

-         db.session.add(build_one)

-         db.session.add(component_one)

-         db.session.add(component_two)

-         db.session.add(component_three)

-         db.session.add(component_four)

-         db.session.commit()

-         db.session.expire_all()

+         db_session.add_all([

+             models.ComponentBuild(

+                 module_id=build_one.id,

+                 package="perl-Tangerine",

+                 format="rpms",

+                 scmurl="https://src.stg.fedoraproject.org/rpms/perl-Tangerine.git?#master",

+                 batch=2),

+             models.ComponentBuild(

+                 module_id=build_one.id,

+                 package="perl-List-Compare",

+                 format="rpms",

+                 scmurl="https://src.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master",

+                 batch=2),

+             models.ComponentBuild(

+                 module_id=build_one.id,

+                 package="tangerine",

+                 format="rpms",

+                 scmurl="https://src.stg.fedoraproject.org/rpms/tangerine.git?#master",

+                 batch=3),

+             # Failed module-build-macros

+             models.ComponentBuild(

+                 module_id=build_one.id,

+                 package="module-build-macros",

+                 format="rpms",

+                 state=koji.BUILD_STATES["FAILED"],

+                 scmurl=(

+                     "/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1."

+                     "module_testmodule_master_20180205135154.src.rpm"

+                 ),

+                 batch=1,

+                 build_time_only=True)

+         ])

+ 

+         db_session.commit()

  

          FakeSCM(mocked_scm, "testmodule", "testmodule.yaml", "7fea453")

          # Resubmit the failed module
@@ -1342,21 +1365,22 @@ 

  

          data = json.loads(rv.data)

          module_build_id = data["id"]

-         module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()

+         module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)

          # Make sure the build went from failed to wait

          assert module_build.state == models.BUILD_STATES["wait"]

          assert module_build.state_reason == "Resubmitted by Homer J. Simpson"

          # Make sure the state was reset on the failed component

          for c in module_build.component_builds:

              assert c.state is None

-         db.session.expire_all()

+         db_session.expire_all()

  

          # Run the backend

-         self.run_scheduler(db_session)

+         with models.make_db_session(conf) as scheduler_db_session:

+             self.run_scheduler(scheduler_db_session)

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.

-         for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

+         for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:

              assert build.state == koji.BUILD_STATES["COMPLETE"]

              assert build.module_build.state in [

                  models.BUILD_STATES["done"],
@@ -1374,7 +1398,7 @@ 

          FakeSCM(

              mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")

  

-         with patch("module_build_service.utils.submit.format_mmd") as mock_format_mmd:

+         with patch("module_build_service.scheduler.handlers.modules.format_mmd") as mock_format_mmd:

              mock_format_mmd.side_effect = Forbidden("Custom component repositories aren't allowed.")

              rv = self.client.post(

                  "/module-build-service/1/module-builds/",
@@ -1389,7 +1413,7 @@ 

              cleanup_moksha()

  

          module_build_id = json.loads(rv.data)["id"]

-         module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()

+         module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)

          assert module_build.state == models.BUILD_STATES["failed"]

          assert module_build.state_reason == "Custom component repositories aren't allowed."

          assert len(module_build.module_builds_trace) == 2
@@ -1397,7 +1421,7 @@ 

          assert module_build.module_builds_trace[1].state == models.BUILD_STATES["failed"]

  

          # Resubmit the failed module

-         rv = self.client.post(

+         self.client.post(

              "/module-build-service/1/module-builds/",

              data=json.dumps({

                  "branch": "master",
@@ -1410,7 +1434,8 @@ 

  

          module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()

          components = (

-             models.ComponentBuild.query.filter_by(module_id=module_build_id, batch=2)

+             db_session.query(models.ComponentBuild)

+             .filter_by(module_id=module_build_id, batch=2)

              .order_by(models.ComponentBuild.id)

              .all()

          )
@@ -1419,25 +1444,24 @@ 

          assert module_build.state_reason == "Resubmitted by Homer J. Simpson"

          # Make sure there are no components

          assert components == []

-         db.session.expire_all()

+         db_session.expire_all()

  

          # Run the backend again

          self.run_scheduler(db_session)

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.

-         for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

+         for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:

              assert build.state == koji.BUILD_STATES["COMPLETE"]

              assert build.module_build.state in [

                  models.BUILD_STATES["done"],

                  models.BUILD_STATES["ready"],

              ]

  

-     @patch("module_build_service.utils.greenwave.Greenwave.check_gating", return_value=True)

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_resume_init_fail(

-         self, mocked_scm, mocked_get_user, mock_greenwave, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

      ):

          """

          Tests that resuming the build fails when the build is in init state
@@ -1665,18 +1689,17 @@ 

                  msg_id="a faked internal message", repo_tag=module.koji_tag + "-build"

              )

          ]

-         db.session.expire_all()

+         db_session.expire_all()

          # Stop after processing the seeded message

          self.run_scheduler(db_session, msgs, lambda message: True)

          # Make sure the module build didn't fail so that the poller can resume it later

-         module = db_session.query(models.ModuleBuild).get(module_build_id)

+         module = models.ModuleBuild.get_by_id(db_session, module_build_id)

          assert module.state == models.BUILD_STATES["build"]

  

-     @patch("module_build_service.utils.greenwave.Greenwave.check_gating", return_value=True)

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_br_metadata_only_module(

-         self, mocked_scm, mocked_get_user, mock_greenwave, conf_system, dbg, hmsc, db_session

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

      ):

          """

          Test that when a build is submitted with a buildrequire without a Koji tag,
@@ -1685,7 +1708,7 @@ 

          metadata_mmd = module_build_service.utils.load_mmd(

              read_staged_data("build_metadata_module")

          )

-         module_build_service.utils.import_mmd(db.session, metadata_mmd)

+         module_build_service.utils.import_mmd(db_session, metadata_mmd)

  

          FakeSCM(

              mocked_scm,
@@ -1712,7 +1735,7 @@ 

          FakeModuleBuilder.on_buildroot_add_repos_cb = on_buildroot_add_repos_cb

          self.run_scheduler(db_session)

  

-         module = db_session.query(models.ModuleBuild).get(module_build_id)

+         module = models.ModuleBuild.get_by_id(db_session, module_build_id)

          assert module.state == models.BUILD_STATES["ready"]

  

  
@@ -1732,7 +1755,8 @@ 

          cleanup_moksha()

          for i in range(20):

              try:

-                 os.remove(build_logs.path(i))

+                 with models.make_db_session(conf) as db_session:

+                     os.remove(build_logs.path(db_session, i))

              except Exception:

                  pass

  
@@ -1750,8 +1774,7 @@ 

          """

          Tests local module build dependency.

          """

-         # with app.app_context():

-         module_build_service.utils.load_local_builds(["platform"])

+         module_build_service.utils.load_local_builds(db_session, ["platform"])

          FakeSCM(

              mocked_scm,

              "testmodule",
@@ -1779,7 +1802,7 @@ 

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.

-         for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

+         for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:

              assert build.state == koji.BUILD_STATES["COMPLETE"]

              assert build.module_build.state in [

                  models.BUILD_STATES["done"],

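A pattern worth calling out before the next file: everywhere these tests need a session of their own (the teardown cleanup, the dedicated scheduler session above), they open it via models.make_db_session(conf) as a context manager rather than touching the global Flask-SQLAlchemy db.session. A minimal sketch of such a factory, assuming it simply wraps a sessionmaker bound to the configured database; conf.database_uri is an invented accessor, and the real implementation in module_build_service.models may differ:

    from contextlib import contextmanager

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    @contextmanager
    def make_db_session(conf):
        """Yield a dedicated SQLAlchemy session and always clean it up."""
        # conf.database_uri stands in for the configured
        # SQLALCHEMY_DATABASE_URI; the real config accessor may differ.
        engine = create_engine(conf.database_uri)
        session = sessionmaker(bind=engine)()
        try:
            yield session
            session.commit()
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()
            engine.dispose()

Because every with block gets its own connection, the scheduler can run against a session that is fully separate from the one the test holds open.
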
file modified
+21 -20
@@ -26,7 +26,7 @@ 

  import module_build_service.builder

  import module_build_service.resolver

  

- from tests import init_data, db

+ from tests import init_data

  

  from module_build_service.builder import GenericBuilder

  from mock import patch
@@ -35,11 +35,10 @@ 

  class TestGenericBuilder:

      def setup_method(self, test_method):

          init_data(1)

-         self.module = module_build_service.models.ModuleBuild.query.filter_by(id=1).one()

  

      @patch("module_build_service.resolver.DBResolver")

-     @patch("module_build_service.resolver.GenericResolver")

-     def test_default_buildroot_groups_cache(self, generic_resolver, resolver):

+     @patch("module_build_service.builder.base.GenericResolver")

+     def test_default_buildroot_groups_cache(self, generic_resolver, resolver, db_session):

          mbs_groups = {"buildroot": [], "srpm-buildroot": []}

  

          resolver = mock.MagicMock()
@@ -48,27 +47,29 @@ 

  

          expected_groups = {"build": [], "srpm-build": []}

  

-         with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-             # Call default_buildroot_groups, the result should be cached.

-             ret = GenericBuilder.default_buildroot_groups(db.session, self.module)

-             assert ret == expected_groups

-             resolver.resolve_profiles.assert_called_once()

-             resolver.resolve_profiles.reset_mock()

+         module = module_build_service.models.ModuleBuild.get_by_id(db_session, 1)

+ 

+         generic_resolver.create.return_value = resolver

+         # Call default_buildroot_groups, the result should be cached.

+         ret = GenericBuilder.default_buildroot_groups(db_session, module)

+         assert ret == expected_groups

+         resolver.resolve_profiles.assert_called_once()

+         resolver.resolve_profiles.reset_mock()

  

          # Now try calling it again to verify resolve_profiles is not called,

          # because it is cached.

-         with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-             ret = GenericBuilder.default_buildroot_groups(db.session, self.module)

-             assert ret == expected_groups

-             resolver.resolve_profiles.assert_not_called()

-             resolver.resolve_profiles.reset_mock()

+         generic_resolver.create.return_value = resolver

+         ret = GenericBuilder.default_buildroot_groups(db_session, module)

+         assert ret == expected_groups

+         resolver.resolve_profiles.assert_not_called()

+         resolver.resolve_profiles.reset_mock()

  

          # And now try clearing the cache and calling it again.

-         with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-             GenericBuilder.clear_cache(self.module)

-             ret = GenericBuilder.default_buildroot_groups(db.session, self.module)

-             assert ret == expected_groups

-             resolver.resolve_profiles.assert_called_once()

+         generic_resolver.create.return_value = resolver

+         GenericBuilder.clear_cache(module)

+         ret = GenericBuilder.default_buildroot_groups(db_session, module)

+         assert ret == expected_groups

+         resolver.resolve_profiles.assert_called_once()

  

      def test_get_build_weights(self):

          weights = GenericBuilder.get_build_weights(["httpd", "apr"])

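The caching behavior that test_default_buildroot_groups_cache exercises — compute once through the resolver, serve the second call from the cache, recompute after clear_cache() — can be shown with a self-contained sketch. Only the dogpile.cache calls (make_region, get_or_create, delete) are the real API; the classes and the cache-key format are invented for illustration:

    import dogpile.cache

    region = dogpile.cache.make_region().configure("dogpile.cache.memory")

    class FakeResolver(object):
        calls = 0

        def resolve_profiles(self, module, keys):
            FakeResolver.calls += 1
            return dict((key, []) for key in keys)

    class Builder(object):
        @classmethod
        def default_buildroot_groups(cls, resolver, module):
            def creator():
                profiles = resolver.resolve_profiles(
                    module, ("buildroot", "srpm-buildroot"))
                return {"build": profiles["buildroot"],
                        "srpm-build": profiles["srpm-buildroot"]}
            # get_or_create only runs creator() on a cache miss.
            return region.get_or_create("groups:%s" % module, creator)

        @classmethod
        def clear_cache(cls, module):
            region.delete("groups:%s" % module)

    resolver = FakeResolver()
    Builder.default_buildroot_groups(resolver, "testmodule")
    Builder.default_buildroot_groups(resolver, "testmodule")
    assert FakeResolver.calls == 1   # second call came from the cache
    Builder.clear_cache("testmodule")
    Builder.default_buildroot_groups(resolver, "testmodule")
    assert FakeResolver.calls == 2   # recomputed after clear_cache()
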
file modified
+94 -57
@@ -33,13 +33,13 @@ 

  import module_build_service.scheduler.handlers.repos

  import module_build_service.models

  import module_build_service.builder

- from module_build_service import Modulemd, db

+ from module_build_service import Modulemd

  from module_build_service.utils.general import mmd_to_str

  

  import pytest

  from mock import patch, MagicMock

  

- from tests import conf, init_data, reuse_component_init_data, clean_database, make_module

+ from tests import conf, init_data, clean_database, make_module

  

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  
@@ -109,7 +109,6 @@ 

          self.config = mock.Mock()

          self.config.koji_profile = conf.koji_profile

          self.config.koji_repository_url = conf.koji_repository_url

-         self.module = module_build_service.models.ModuleBuild.query.filter_by(id=2).one()

  

          self.p_read_config = patch(

              "koji.read_config",
@@ -139,9 +138,12 @@ 

      def test_recover_orphaned_artifact_when_tagged(self, db_session):

          """ Test recover_orphaned_artifact when the artifact is found and tagged in both tags

          """

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+ 

          builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

+             db_session=db_session,

+             owner=module_build.owner,

+             module=module_build,

              config=conf,

              tag_name="module-foo",

              components=[],
@@ -184,9 +186,12 @@ 

      def test_recover_orphaned_artifact_when_untagged(self, db_session):

          """ Tests recover_orphaned_artifact when the build is found but untagged

          """

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+ 

          builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

+             db_session=db_session,

+             owner=module_build.owner,

+             module=module_build,

              config=conf,

              tag_name="module-foo",

              components=[],
@@ -227,9 +232,12 @@ 

      def test_recover_orphaned_artifact_when_nothing_exists(self, db_session):

          """ Test recover_orphaned_artifact when the build is not found

          """

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+ 

          builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

+             db_session=db_session,

+             owner=module_build.owner,

+             module=module_build,

              config=conf,

              tag_name="module-foo",

              components=[],
@@ -255,13 +263,15 @@ 

          assert builder.koji_session.tagBuild.call_count == 0

  

      @patch("koji.util")

-     def test_buildroot_ready(self, mocked_kojiutil):

+     def test_buildroot_ready(self, mocked_kojiutil, db_session):

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

  

          attrs = {"checkForBuilds.return_value": None, "checkForBuilds.side_effect": IOError}

          mocked_kojiutil.configure_mock(**attrs)

          fake_kmb = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

+             db_session=db_session,

+             owner=module_build.owner,

+             module=module_build,

              config=conf,

              tag_name="module-nginx-1.2",

              components=[],
@@ -273,21 +283,25 @@ 

          assert mocked_kojiutil.checkForBuilds.call_count == 3

  

      @pytest.mark.parametrize("blocklist", [False, True])

-     def test_tagging_already_tagged_artifacts(self, blocklist):

+     def test_tagging_already_tagged_artifacts(self, blocklist, db_session):

          """

          Tests that buildroot_add_artifacts and tag_artifacts do not try to

          tag already tagged artifacts

          """

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+ 

          if blocklist:

-             mmd = self.module.mmd()

+             mmd = module_build.mmd()

              xmd = mmd.get_xmd()

              xmd["mbs_options"] = {"blocked_packages": ["foo", "bar", "new"]}

              mmd.set_xmd(xmd)

-             self.module.modulemd = mmd_to_str(mmd)

+             module_build.modulemd = mmd_to_str(mmd)

+             db_session.commit()

  

          builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

+             db_session=db_session,

+             owner=module_build.owner,

+             module=module_build,

              config=conf,

              tag_name="module-nginx-1.2",

              components=[],
@@ -328,10 +342,12 @@ 

  

      @patch.object(FakeKojiModuleBuilder, "get_session")

      @patch.object(FakeKojiModuleBuilder, "_get_tagged_nvrs")

-     def test_untagged_artifacts(self, mock_get_tagged_nvrs, mock_get_session):

+     def test_untagged_artifacts(self, mock_get_tagged_nvrs, mock_get_session, db_session):

          """

          Tests that only tagged artifacts will be untagged

          """

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+ 

          mock_session = mock.Mock()

          mock_session.getTag.side_effect = [

              {"name": "foobar", "id": 1},
@@ -340,8 +356,9 @@ 

          mock_get_session.return_value = mock_session

          mock_get_tagged_nvrs.side_effect = [["foo", "bar"], ["foo"]]

          builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

+             db_session=db_session,

+             owner=module_build.owner,

+             module=module_build,

              config=conf,

              tag_name="module-foo",

              components=[],
@@ -471,45 +488,51 @@ 

      @pytest.mark.parametrize("custom_whitelist", [False, True])

      @pytest.mark.parametrize("repo_include_all", [False, True])

      def test_buildroot_connect(

-         self, custom_whitelist, blocklist, repo_include_all

+         self, custom_whitelist, blocklist, repo_include_all, db_session

      ):

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+ 

          if blocklist:

-             mmd = self.module.mmd()

+             mmd = module_build.mmd()

              xmd = mmd.get_xmd()

              xmd["mbs_options"] = {"blocked_packages": ["foo", "nginx"]}

              mmd.set_xmd(xmd)

-             self.module.modulemd = mmd_to_str(mmd)

+             module_build.modulemd = mmd_to_str(mmd)

+             db_session.commit()

  

          if custom_whitelist:

-             mmd = self.module.mmd()

+             mmd = module_build.mmd()

              opts = Modulemd.Buildopts()

              opts.add_rpm_to_whitelist("custom1")

              opts.add_rpm_to_whitelist("custom2")

              mmd.set_buildopts(opts)

-             self.module.modulemd = mmd_to_str(mmd)

+             module_build.modulemd = mmd_to_str(mmd)

          else:

              # Set some irrelevant buildopts options to test that KojiModuleBuilder

              # is not confused by this.

-             mmd = self.module.mmd()

+             mmd = module_build.mmd()

              opts = Modulemd.Buildopts()

              opts.set_rpm_macros("%my_macro 1")

              mmd.set_buildopts(opts)

-             self.module.modulemd = mmd_to_str(mmd)

+             module_build.modulemd = mmd_to_str(mmd)

+         db_session.commit()

  

          if repo_include_all is False:

-             mmd = self.module.mmd()

+             mmd = module_build.mmd()

              xmd = mmd.get_xmd()

              mbs_options = xmd["mbs_options"] if "mbs_options" in xmd.keys() else {}

              mbs_options["repo_include_all"] = False

              xmd["mbs_options"] = mbs_options

              mmd.set_xmd(xmd)

-             self.module.modulemd = mmd_to_str(mmd)

+             module_build.modulemd = mmd_to_str(mmd)

+             db_session.commit()

  

-         self.module.arches.append(module_build_service.models.ModuleArch(name="i686"))

+         module_build.arches.append(module_build_service.models.ModuleArch(name="i686"))

  

          builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

+             db_session=db_session,

+             owner=module_build.owner,

+             module=module_build,

              config=conf,

              tag_name="module-foo",

              components=["nginx"],
@@ -577,17 +600,21 @@ 

          assert session.editTag2.mock_calls == expected_calls

  

      @pytest.mark.parametrize("blocklist", [False, True])

-     def test_buildroot_connect_create_tag(self, blocklist):

+     def test_buildroot_connect_create_tag(self, blocklist, db_session):

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+ 

          if blocklist:

-             mmd = self.module.mmd()

+             mmd = module_build.mmd()

              xmd = mmd.get_xmd()

              xmd["mbs_options"] = {"blocked_packages": ["foo", "nginx"]}

              mmd.set_xmd(xmd)

-             self.module.modulemd = mmd_to_str(mmd)

+             module_build.modulemd = mmd_to_str(mmd)

+             db_session.commit()

  

          builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

+             db_session=db_session,

+             owner=module_build.owner,

+             module=module_build,

              config=conf,

              tag_name="module-foo",

              components=["nginx"],
@@ -610,13 +637,17 @@ 

          assert session.packageListBlock.mock_calls == expected_calls

  

      @pytest.mark.parametrize("scratch", [False, True])

-     def test_buildroot_connect_create_target(self, scratch):

+     def test_buildroot_connect_create_target(self, scratch, db_session):

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+ 

          if scratch:

-             self.module.scratch = scratch

+             module_build.scratch = scratch

+             db_session.commit()

  

          builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

+             db_session=db_session,

+             owner=module_build.owner,

+             module=module_build,

              config=conf,

              tag_name="module-foo",

              components=["nginx"],
@@ -641,7 +672,7 @@ 

          assert session.createBuildTarget.mock_calls == expected_calls

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_built_rpms_in_module_build(self, ClientSession):

+     def test_get_built_rpms_in_module_build(self, ClientSession, db_session):

          session = ClientSession.return_value

          session.listTaggedRPMS.return_value = (

              [
@@ -679,20 +710,23 @@ 

              [],

          )

  

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+ 

          # Module builds generated by init_data use a generic modulemd file, and

          # the module's name/stream/version/context does not have to match it.

          # But for this test, we need it to match.

-         mmd = self.module.mmd()

-         self.module.name = mmd.get_module_name()

-         self.module.stream = mmd.get_stream_name()

-         self.module.version = mmd.get_version()

-         self.module.context = mmd.get_context()

-         db.session.commit()

+         mmd = module_build.mmd()

+         module_build.name = mmd.get_module_name()

+         module_build.stream = mmd.get_stream_name()

+         module_build.version = mmd.get_version()

+         module_build.context = mmd.get_context()

+         db_session.commit()

  

          ret = KojiModuleBuilder.get_built_rpms_in_module_build(mmd)

          assert set(ret) == set(["bar-2:1.30-4.el8+1308+551bfa71", "tar-2:1.30-4.el8+1308+551bfa71"])

          session.assert_not_called()

  

+     @pytest.mark.usefixtures("reuse_component_init_data")

      @pytest.mark.parametrize(

          "br_filtered_rpms,expected",

          (
@@ -775,7 +809,6 @@ 

                  },

              ],

          )

-         reuse_component_init_data()

          current_module = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)

          rv = KojiModuleBuilder._get_filtered_rpms_on_self_dep(current_module, br_filtered_rpms)

          assert set(rv) == set(expected)
@@ -785,8 +818,9 @@ 

          "cg_enabled,cg_devel_enabled", [(False, False), (True, False), (True, True)]

      )

      @mock.patch("module_build_service.builder.KojiModuleBuilder.KojiContentGenerator")

-     def test_finalize(self, mock_koji_cg_cls, cg_enabled, cg_devel_enabled):

-         self.module.state = 2

+     def test_finalize(self, mock_koji_cg_cls, cg_enabled, cg_devel_enabled, db_session):

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+         module_build.state = 2

          with patch(

              "module_build_service.config.Config.koji_enable_content_generator",

              new_callable=mock.PropertyMock,
@@ -798,8 +832,9 @@ 

                  return_value=cg_devel_enabled,

              ):

                  builder = FakeKojiModuleBuilder(

-                     owner=self.module.owner,

-                     module=self.module,

+                     db_session=db_session,

+                     owner=module_build.owner,

+                     module=module_build,

                      config=conf,

                      tag_name="module-nginx-1.2",

                      components=[],
@@ -825,17 +860,19 @@ 

  

      @patch.dict("sys.modules", krbV=MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_ensure_builder_use_a_logged_in_koji_session(self, ClientSession):

-         builder = KojiModuleBuilder("owner", self.module, conf, "module-tag", [])

+     def test_ensure_builder_use_a_logged_in_koji_session(self, ClientSession, db_session):

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+         builder = KojiModuleBuilder(db_session, "owner", module_build, conf, "module-tag", [])

          builder.koji_session.krb_login.assert_called_once()

  

      @patch.dict("sys.modules", krbV=MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_module_build_arches(self, ClientSession):

+     def test_get_module_build_arches(self, ClientSession, db_session):

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

          arches = "x86_64 i686 ppc64le aarch64 s390x"

          session = ClientSession.return_value

          session.getTag.return_value = {"arches": arches}

-         ret = KojiModuleBuilder.get_module_build_arches(self.module)

+         ret = KojiModuleBuilder.get_module_build_arches(module_build)

          assert " ".join(ret) == arches

  

  

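All of the Koji builder tests above now construct the builder with an explicit db_session and fetch the module through that session, instead of relying on a module loaded via Model.query in setup_method. A minimal sketch of the injected-session constructor shape; the attribute names follow the test code, and refresh_module() is a hypothetical helper added only to show the session in use:

    class SketchModuleBuilder(object):
        """Illustration only, not the actual KojiModuleBuilder."""

        def __init__(self, db_session, owner, module, config, tag_name, components):
            # The caller picks the session, so the frontend, the backend
            # and each test can hand in their own instead of sharing one.
            self.db_session = db_session
            self.owner = owner
            self.module = module
            self.config = config
            self.tag_name = tag_name
            self.components = components

        def refresh_module(self):
            # Hypothetical: re-read the module through the injected session
            # rather than through a global Model.query property.
            return self.db_session.query(type(self.module)).get(self.module.id)
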
file modified
+70 -72
@@ -8,7 +8,7 @@ 

  import kobo.rpmlib

  

  from module_build_service import conf

- from module_build_service.models import ModuleBuild, ComponentBuild, make_session

+ from module_build_service.models import ModuleBuild, ComponentBuild

  from module_build_service.builder.MockModuleBuilder import MockModuleBuilder

  from module_build_service.utils import import_fake_base_module, mmd_to_str, load_mmd

  from tests import clean_database, make_module, read_staged_data
@@ -23,7 +23,7 @@ 

          clean_database()

          shutil.rmtree(self.resultdir)

  

-     def _create_module_with_filters(self, session, batch, state):

+     def _create_module_with_filters(self, db_session, batch, state):

          mmd = load_mmd(read_staged_data("testmodule-with-filters"))

          # Set the name and stream

          mmd = mmd.copy("mbs-testmodule", "test")
@@ -63,7 +63,7 @@ 

              }

          })

          module = ModuleBuild.create(

-             session,

+             db_session,

              conf,

              name="mbs-testmodule",

              stream="test",
@@ -74,8 +74,8 @@ 

          )

          module.koji_tag = "module-mbs-testmodule-test-20171027111452"

          module.batch = batch

-         session.add(module)

-         session.commit()

+         db_session.add(module)

+         db_session.commit()

  

          comp_builds = [

              {
@@ -105,86 +105,82 @@ 

          ]

  

          for build in comp_builds:

-             session.add(ComponentBuild(**build))

-         session.commit()

+             db_session.add(ComponentBuild(**build))

+         db_session.commit()

  

          return module

  

      @mock.patch("module_build_service.conf.system", new="mock")

-     def test_createrepo_filter_last_batch(self, *args):

-         with make_session(conf) as session:

-             module = self._create_module_with_filters(session, 3, koji.BUILD_STATES["COMPLETE"])

- 

-             builder = MockModuleBuilder(

-                 "mcurlej", module, conf, module.koji_tag, module.component_builds

-             )

-             builder.resultsdir = self.resultdir

-             rpms = [

-                 "ed-1.14.1-4.module+24957a32.x86_64.rpm",

-                 "mksh-56b-1.module+24957a32.x86_64.rpm",

-                 "module-build-macros-0.1-1.module+24957a32.noarch.rpm",

-             ]

-             rpm_qf_output = dedent("""\

-                 ed 0 1.14.1 4.module+24957a32 x86_64

-                 mksh 0 56b-1 module+24957a32 x86_64

-                 module-build-macros 0 0.1 1.module+24957a32 noarch

-             """)

-             with mock.patch("os.listdir", return_value=rpms):

-                 with mock.patch("subprocess.check_output", return_value=rpm_qf_output):

-                     builder._createrepo()

- 

-             with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:

-                 pkglist = fd.read().strip()

-                 rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split("\n")]

-                 assert "ed" not in rpm_names

+     def test_createrepo_filter_last_batch(self, db_session):

+         module = self._create_module_with_filters(db_session, 3, koji.BUILD_STATES["COMPLETE"])

  

-     @mock.patch("module_build_service.conf.system", new="mock")

-     def test_createrepo_not_last_batch(self):

-         with make_session(conf) as session:

-             module = self._create_module_with_filters(session, 2, koji.BUILD_STATES["COMPLETE"])

- 

-             builder = MockModuleBuilder(

-                 "mcurlej", module, conf, module.koji_tag, module.component_builds

-             )

-             builder.resultsdir = self.resultdir

-             rpms = [

-                 "ed-1.14.1-4.module+24957a32.x86_64.rpm",

-                 "mksh-56b-1.module+24957a32.x86_64.rpm",

-             ]

-             rpm_qf_output = dedent("""\

-                 ed 0 1.14.1 4.module+24957a32 x86_64

-                 mksh 0 56b-1 module+24957a32 x86_64

-             """)

-             with mock.patch("os.listdir", return_value=rpms):

-                 with mock.patch("subprocess.check_output", return_value=rpm_qf_output):

-                     builder._createrepo()

- 

-             with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:

-                 pkglist = fd.read().strip()

-                 rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split("\n")]

-                 assert "ed" in rpm_names

+         builder = MockModuleBuilder(

+             db_session, "mcurlej", module, conf, module.koji_tag, module.component_builds

+         )

+         builder.resultsdir = self.resultdir

+         rpms = [

+             "ed-1.14.1-4.module+24957a32.x86_64.rpm",

+             "mksh-56b-1.module+24957a32.x86_64.rpm",

+             "module-build-macros-0.1-1.module+24957a32.noarch.rpm",

+         ]

+         rpm_qf_output = dedent("""\

+             ed 0 1.14.1 4.module+24957a32 x86_64

+             mksh 0 56b-1 module+24957a32 x86_64

+             module-build-macros 0 0.1 1.module+24957a32 noarch

+         """)

+         with mock.patch("os.listdir", return_value=rpms):

+             with mock.patch("subprocess.check_output", return_value=rpm_qf_output):

+                 builder._createrepo()

+ 

+         with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:

+             pkglist = fd.read().strip()

+             rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split("\n")]

+             assert "ed" not in rpm_names

  

      @mock.patch("module_build_service.conf.system", new="mock")

-     def test_createrepo_empty_rmp_list(self, *args):

-         with make_session(conf) as session:

-             module = self._create_module_with_filters(session, 3, koji.BUILD_STATES["COMPLETE"])

- 

-             builder = MockModuleBuilder(

-                 "mcurlej", module, conf, module.koji_tag, module.component_builds)

-             builder.resultsdir = self.resultdir

-             rpms = []

-             with mock.patch("os.listdir", return_value=rpms):

+     def test_createrepo_not_last_batch(self, db_session):

+         module = self._create_module_with_filters(db_session, 2, koji.BUILD_STATES["COMPLETE"])

+ 

+         builder = MockModuleBuilder(

+             db_session, "mcurlej", module, conf, module.koji_tag, module.component_builds

+         )

+         builder.resultsdir = self.resultdir

+         rpms = [

+             "ed-1.14.1-4.module+24957a32.x86_64.rpm",

+             "mksh-56b-1.module+24957a32.x86_64.rpm",

+         ]

+         rpm_qf_output = dedent("""\

+             ed 0 1.14.1 4.module+24957a32 x86_64

+             mksh 0 56b-1 module+24957a32 x86_64

+         """)

+         with mock.patch("os.listdir", return_value=rpms):

+             with mock.patch("subprocess.check_output", return_value=rpm_qf_output):

                  builder._createrepo()

  

-             with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:

-                 pkglist = fd.read().strip()

-                 assert not pkglist

+         with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:

+             pkglist = fd.read().strip()

+             rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split("\n")]

+             assert "ed" in rpm_names

+ 

+     @mock.patch("module_build_service.conf.system", new="mock")

+     def test_createrepo_empty_rpm_list(self, db_session):

+         module = self._create_module_with_filters(db_session, 3, koji.BUILD_STATES["COMPLETE"])

+ 

+         builder = MockModuleBuilder(

+             db_session, "mcurlej", module, conf, module.koji_tag, module.component_builds)

+         builder.resultsdir = self.resultdir

+         rpms = []

+         with mock.patch("os.listdir", return_value=rpms):

+             builder._createrepo()

+ 

+         with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:

+             pkglist = fd.read().strip()

+             assert not pkglist

  

  

  class TestMockModuleBuilderAddRepos:

      def setup_method(self, test_method):

          clean_database(add_platform_module=False)

-         import_fake_base_module("platform:f29:1:000000")

  

      @mock.patch("module_build_service.conf.system", new="mock")

      @mock.patch(
@@ -203,6 +199,8 @@ 

      def test_buildroot_add_repos(

          self, write_config, load_config, patched_open, base_module_repofiles, db_session

      ):

+         import_fake_base_module(db_session, "platform:f29:1:000000")

+ 

          platform = ModuleBuild.get_last_build_in_stream(db_session, "platform", "f29")

          foo = make_module(

              db_session, "foo:1:1:1", {"platform": ["f29"]}, {"platform": ["f29"]})
@@ -215,7 +213,7 @@ 

              mock.mock_open(read_data="[fake]\nrepofile 3\n").return_value,

          ]

  

-         builder = MockModuleBuilder("user", app, conf, "module-app", [])

+         builder = MockModuleBuilder(db_session, "user", app, conf, "module-app", [])

  

          dependencies = {

              "repofile://": [platform.mmd()],

@@ -71,7 +71,8 @@ 

  

          # Ensure that there is no build log from other tests

          try:

-             file_path = build_logs.path(self.cg.module)

+             with models.make_db_session(conf) as db_session:

+                 file_path = build_logs.path(db_session, self.cg.module)

              os.remove(file_path)

          except OSError:

              pass
@@ -88,7 +89,8 @@ 

          import moksha.hub.reactor  # noqa

  

          try:

-             file_path = build_logs.path(self.cg.module)

+             with models.make_db_session(conf) as db_session:

+                 file_path = build_logs.path(db_session, self.cg.module)

              os.remove(file_path)

          except OSError:

              pass
@@ -135,7 +137,8 @@ 

              expected_output = json.load(expected_output_file)

  

          # create the build.log

-         build_logs.start(self.cg.module)

+         with models.make_db_session(conf) as db_session:

+             build_logs.start(db_session, self.cg.module)

          build_logs.stop(self.cg.module)

  

          self.cg.devel = devel
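
The recurring edit through these test hunks is mechanical: the old `make_session` helper becomes `make_db_session`, and the session it yields is handed explicitly to APIs such as `build_logs.start` and `build_logs.path`. A minimal sketch of the context-manager shape these call sites assume (the engine/sessionmaker wiring below is illustrative only, not the actual body in module_build_service.models):

    from contextlib import contextmanager

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    @contextmanager
    def make_db_session(conf):
        # Illustrative sketch: open a session against the configured
        # database URI and guarantee it is closed when the block exits.
        engine = create_engine(conf.sqlalchemy_database_uri)
        session = sessionmaker(bind=engine)()
        try:
            yield session
        finally:
            session.close()
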

file modified
+11 -11
@@ -60,16 +60,16 @@ 

          MBSConsumer.current_module_build_id = None

          shutil.rmtree(self.base)

  

-     def test_module_build_logs(self):

+     def test_module_build_logs(self, db_session):

          """

          Tests that ModuleBuildLogs is logging properly to the build log file.

          """

-         build = models.ModuleBuild.query.filter_by(id=2).one()

+         build = models.ModuleBuild.get_by_id(db_session, 2)

  

          # Initialize logging, get the build log path and remove it to

          # ensure we are not using some garbage from previous failed test.

-         self.build_log.start(build)

-         path = self.build_log.path(build)

+         self.build_log.start(db_session, build)

+         path = self.build_log.path(db_session, build)

          assert path[len(self.base):] == "/build-2.log"

          if os.path.exists(path):

              os.unlink(path)
@@ -86,7 +86,7 @@ 

          # Try logging with current_module_build_id set to 1 and then to 2.

          # Only messages with current_module_build_id set to 2 should appear in

          # the log.

-         self.build_log.start(build)

+         self.build_log.start(db_session, build)

          MBSConsumer.current_module_build_id = 1

          log.debug("ignore this test msg1")

          log.info("ignore this test msg1")
@@ -119,13 +119,13 @@ 

              data = f.read()

              assert data.find("ignore this test msg3") == -1

  

-     def test_module_build_logs_name_format(self):

-         build = models.ModuleBuild.query.filter_by(id=2).one()

+     def test_module_build_logs_name_format(self, db_session):

+         build = models.ModuleBuild.get_by_id(db_session, 2)

  

          log1 = ModuleBuildLogs("/some/path", "build-{id}.log")

-         assert log1.name(build) == "build-2.log"

-         assert log1.path(build) == "/some/path/build-2.log"

+         assert log1.name(db_session, build) == "build-2.log"

+         assert log1.path(db_session, build) == "/some/path/build-2.log"

  

          log2 = ModuleBuildLogs("/some/path", "build-{name}-{stream}-{version}.log")

-         assert log2.name(build) == "build-nginx-1-2.log"

-         assert log2.path(build) == "/some/path/build-nginx-1-2.log"

+         assert log2.name(db_session, build) == "build-nginx-1-2.log"

+         assert log2.path(db_session, build) == "/some/path/build-nginx-1-2.log"
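
With this change every ModuleBuildLogs accessor (`name`, `path`, `start`) takes the session as its first argument, since resolving a build id to a ModuleBuild row now requires an explicit session. A caller sketch, reusing the build id and name format exercised by the test above:

    from module_build_service import build_logs, conf, models

    with models.make_db_session(conf) as db_session:
        build = models.ModuleBuild.get_by_id(db_session, 2)
        build_logs.start(db_session, build)
        log_path = build_logs.path(db_session, build)  # e.g. ".../build-2.log"
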

file modified
+45 -45
@@ -18,20 +18,15 @@ 

  # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE

  # SOFTWARE.

  import pytest

- from mock import patch, mock_open, ANY

+ from mock import patch, mock_open, ANY, Mock

  

- from module_build_service import app, conf

+ from module_build_service import app

  from module_build_service.manage import retire, build_module_locally

- from module_build_service.models import BUILD_STATES, ModuleBuild, make_session

- from tests.test_models import clean_database, init_data

+ from module_build_service.models import BUILD_STATES, ModuleBuild

  

  

+ @pytest.mark.usefixtures("model_tests_init_data")

  class TestMBSManage:

-     def setup_method(self, test_method):

-         init_data()

- 

-     def teardown_method(self, test_method):

-         clean_database(False, False)

  

      @pytest.mark.parametrize(

          ("identifier", "is_valid"),
@@ -64,34 +59,33 @@ 

          ),

      )

      @patch("module_build_service.manage.prompt_bool")

-     def test_retire_build(self, prompt_bool, overrides, identifier, changed_count):

+     def test_retire_build(self, prompt_bool, overrides, identifier, changed_count, db_session):

          prompt_bool.return_value = True

  

-         with make_session(conf) as session:

-             module_builds = session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all()

-             # Verify our assumption of the amount of ModuleBuilds in database

-             assert len(module_builds) == 3

+         module_builds = db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all()

+         # Verify our assumption of the number of ModuleBuilds in the database

+         assert len(module_builds) == 3

  

-             for x, build in enumerate(module_builds):

-                 build.name = "spam"

-                 build.stream = "eggs"

-                 build.version = "ham"

-                 build.context = str(x)

+         for x, build in enumerate(module_builds):

+             build.name = "spam"

+             build.stream = "eggs"

+             build.version = "ham"

+             build.context = str(x)

  

-             for attr, value in overrides.items():

-                 setattr(module_builds[0], attr, value)

+         for attr, value in overrides.items():

+             setattr(module_builds[0], attr, value)

  

-             session.commit()

+         db_session.commit()

  

-             retire(identifier)

-             retired_module_builds = (

-                 session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all()

-             )

+         retire(identifier)

+         retired_module_builds = (

+             db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all()

+         )

  

-             assert len(retired_module_builds) == changed_count

-             for x in range(changed_count):

-                 assert retired_module_builds[x].id == module_builds[x].id

-                 assert retired_module_builds[x].state == BUILD_STATES["garbage"]

+         assert len(retired_module_builds) == changed_count

+         for x in range(changed_count):

+             assert retired_module_builds[x].id == module_builds[x].id

+             assert retired_module_builds[x].state == BUILD_STATES["garbage"]

  

      @pytest.mark.parametrize(

          ("confirm_prompt", "confirm_arg", "confirm_expected"),
@@ -104,25 +98,24 @@ 

      )

      @patch("module_build_service.manage.prompt_bool")

      def test_retire_build_confirm_prompt(

-         self, prompt_bool, confirm_prompt, confirm_arg, confirm_expected

+         self, prompt_bool, confirm_prompt, confirm_arg, confirm_expected, db_session

      ):

          prompt_bool.return_value = confirm_prompt

  

-         with make_session(conf) as session:

-             module_builds = session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all()

-             # Verify our assumption of the amount of ModuleBuilds in database

-             assert len(module_builds) == 3

+         module_builds = db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all()

+         # Verify our assumption of the number of ModuleBuilds in the database

+         assert len(module_builds) == 3

  

-             for x, build in enumerate(module_builds):

-                 build.name = "spam"

-                 build.stream = "eggs"

+         for x, build in enumerate(module_builds):

+             build.name = "spam"

+             build.stream = "eggs"

  

-             session.commit()

+         db_session.commit()

  

-             retire("spam:eggs", confirm_arg)

-             retired_module_builds = (

-                 session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all()

-             )

+         retire("spam:eggs", confirm_arg)

+         retired_module_builds = (

+             db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all()

+         )

  

          expected_changed_count = 3 if confirm_expected else 0

          assert len(retired_module_builds) == expected_changed_count
@@ -131,8 +124,13 @@ 

      @patch("module_build_service.manage.submit_module_build_from_yaml")

      @patch("module_build_service.scheduler.main")

      @patch("module_build_service.manage.conf.set_item")

+     @patch("module_build_service.models.make_db_session")

      def test_build_module_locally_set_stream(

-             self, conf_set_item, main, submit_module_build_from_yaml, patched_open):

+         self, make_db_session, conf_set_item, main, submit_module_build_from_yaml, patched_open

+     ):

+         mock_db_session = Mock()

+         make_db_session.return_value.__enter__.return_value = mock_db_session

+ 

          # build_module_locally changes database uri to a local SQLite database file.

          # Restore the uri to original one in order to not impact the database

          # session in subsequent tests.
@@ -144,5 +142,7 @@ 

              app.config['SQLALCHEMY_DATABASE_URI'] = original_db_uri

  

          submit_module_build_from_yaml.assert_called_once_with(

-             ANY, ANY, {"default_streams": {"platform": "el8"}, "local_build": True},

+             mock_db_session, ANY, ANY, {

+                 "default_streams": {"platform": "el8"}, "local_build": True

+             },

              skiptests=False, stream="foo")

@@ -19,47 +19,3 @@ 

  # SOFTWARE.

  #

  # Written by Ralph Bean <rbean@redhat.com>

- 

- import os

- from datetime import datetime

- 

- import module_build_service

- from module_build_service.utils import load_mmd

- 

- from tests import db, clean_database

- from module_build_service.config import init_config

- from module_build_service.models import ModuleBuild, BUILD_STATES

- 

- app = module_build_service.app

- 

- conf = init_config(app)

- 

- datadir = os.path.dirname(__file__) + "/data/"

- 

- 

- def module_build_from_modulemd(yaml):

-     mmd = load_mmd(yaml)

-     build = ModuleBuild()

-     build.name = mmd.get_module_name()

-     build.stream = mmd.get_stream_name()

-     build.version = mmd.get_version()

-     build.state = BUILD_STATES["ready"]

-     build.modulemd = yaml

-     build.koji_tag = None

-     build.batch = 0

-     build.owner = "some_other_user"

-     build.time_submitted = datetime(2016, 9, 3, 12, 28, 33)

-     build.time_modified = datetime(2016, 9, 3, 12, 28, 40)

-     build.time_completed = None

-     build.rebuild_strategy = "changed-and-after"

-     return build

- 

- 

- def init_data():

-     clean_database()

-     for filename in os.listdir(datadir):

-         with open(datadir + filename, "r") as f:

-             yaml = f.read()

-         build = module_build_from_modulemd(yaml)

-         db.session.add(build)

-     db.session.commit()
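
The deleted tests/test_models helpers do not simply vanish: module_build_from_modulemd moves into the tests package (see the import change in the next file's hunks below), and the inline init_data() setup is replaced by the model_tests_init_data fixture applied via @pytest.mark.usefixtures. A plausible shape for that fixture, reusing the deleted logic (the conftest placement and exact body are assumptions, not shown in this diff):

    import os

    import pytest

    from tests import clean_database, module_build_from_modulemd

    @pytest.fixture
    def model_tests_init_data(db_session):
        # Assumed sketch: seed one ModuleBuild per staged modulemd file,
        # mirroring what tests.test_models.init_data used to do inline.
        clean_database()
        datadir = os.path.join(os.path.dirname(__file__), "data")
        for filename in os.listdir(datadir):
            with open(os.path.join(datadir, filename), "r") as f:
                db_session.add(module_build_from_modulemd(f.read()))
        db_session.commit()
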

file modified
+122 -117
@@ -24,47 +24,48 @@ 

  

  from mock import patch

  from module_build_service import conf

- from module_build_service.models import ComponentBuild, ModuleBuild, make_session

+ from module_build_service.models import ComponentBuild, ComponentBuildTrace, ModuleBuild

  from module_build_service.utils.general import mmd_to_str, load_mmd

  from tests import init_data as init_data_contexts, clean_database, make_module, read_staged_data

- from tests.test_models import init_data, module_build_from_modulemd

+ from tests import module_build_from_modulemd

  

  

+ @pytest.mark.usefixtures("model_tests_init_data")

  class TestModels:

-     def setup_method(self, test_method):

-         init_data()

- 

-     def test_app_sqlalchemy_events(self):

-         with make_session(conf) as session:

-             component_build = ComponentBuild()

-             component_build.package = "before_models_committed"

-             component_build.scmurl = (

-                 "git://pkgs.domain.local/rpms/before_models_committed?"

-                 "#9999999999999999999999999999999999999999"

-             )

-             component_build.format = "rpms"

-             component_build.task_id = 999999999

-             component_build.state = 1

-             component_build.nvr = \

-                 "before_models_committed-0.0.0-0.module_before_models_committed_0_0"

-             component_build.batch = 1

-             component_build.module_id = 1

- 

-             session.add(component_build)

-             session.commit()

- 

-         with make_session(conf) as session:

-             c = session.query(ComponentBuild).filter(ComponentBuild.id == 1).one()

-             assert c.component_builds_trace[0].id == 1

-             assert c.component_builds_trace[0].component_id == 1

-             assert c.component_builds_trace[0].state == 1

-             assert c.component_builds_trace[0].state_reason is None

-             assert c.component_builds_trace[0].task_id == 999999999

- 

-     def test_context_functions(self):

+ 

+     def test_app_sqlalchemy_events(self, db_session):

+         component_build = ComponentBuild(

+             package="before_models_committed",

+             scmurl="git://pkgs.domain.local/rpms/before_models_committed?"

+                    "#9999999999999999999999999999999999999999",

+             format="rpms",

+             task_id=999999999,

+             state=1,

+             nvr="before_models_committed-0.0.0-0.module_before_models_committed_0_0",

+             batch=1,

+             module_id=1,

+         )

+ 

+         db_session.add(component_build)

+         db_session.commit()

+ 

+         component_builds_trace = db_session.query(ComponentBuildTrace).filter(

+             ComponentBuildTrace.component_id == component_build.id).one()

+         db_session.commit()

+ 

+         assert component_builds_trace.id == 1

+         assert component_builds_trace.component_id == 1

+         assert component_builds_trace.state == 1

+         assert component_builds_trace.state_reason is None

+         assert component_builds_trace.task_id == 999999999

+ 

+     def test_context_functions(self, db_session):

          """ Test that the build_context, runtime_context, and context hashes are correctly

          determined"""

-         build = ModuleBuild.query.filter_by(id=1).one()

+         db_session.commit()

+ 

+         build = ModuleBuild.get_by_id(db_session, 1)

+         db_session.commit()

          build.modulemd = read_staged_data("testmodule_dependencies")

          (

              build.ref_build_context,
@@ -89,8 +90,12 @@ 

              build.runtime_context = "bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c" + str(i)

              db_session.add(build)

          db_session.commit()

+ 

          build_one = ModuleBuild.get_by_id(db_session, 2)

-         assert build_one.siblings == [3, 4]

+         sibling_ids = build_one.siblings(db_session)

+         db_session.commit()

+ 

+         assert sibling_ids == [3, 4]

  

      @pytest.mark.parametrize(

          "stream,right_pad,expected",
@@ -111,100 +116,100 @@ 

  

  

  class TestModelsGetStreamsContexts:

-     def test_get_last_build_in_all_streams(self):

+     def test_get_last_build_in_all_streams(self, db_session):

          init_data_contexts(contexts=True)

-         with make_session(conf) as session:

-             builds = ModuleBuild.get_last_build_in_all_streams(session, "nginx")

-             builds = sorted([

-                 "%s:%s:%s" % (build.name, build.stream, str(build.version)) for build in builds

-             ])

-             assert builds == ["nginx:%d:%d" % (i, i + 2) for i in range(10)]

- 

-     def test_get_last_build_in_all_stream_last_version(self):

+         builds = ModuleBuild.get_last_build_in_all_streams(db_session, "nginx")

+         builds = sorted([

+             "%s:%s:%s" % (build.name, build.stream, str(build.version)) for build in builds

+         ])

+         db_session.commit()

+         assert builds == ["nginx:%d:%d" % (i, i + 2) for i in range(10)]

+ 

+     def test_get_last_build_in_all_stream_last_version(self, db_session):

          init_data_contexts(contexts=False)

-         with make_session(conf) as session:

-             builds = ModuleBuild.get_last_build_in_all_streams(session, "nginx")

-             builds = [

-                 "%s:%s:%s" % (build.name, build.stream, str(build.version)) for build in builds

-             ]

-             assert builds == ["nginx:1:11"]

- 

-     def test_get_last_builds_in_stream(self):

+         builds = ModuleBuild.get_last_build_in_all_streams(db_session, "nginx")

+         builds = [

+             "%s:%s:%s" % (build.name, build.stream, str(build.version)) for build in builds

+         ]

+         db_session.commit()

+         assert builds == ["nginx:1:11"]

+ 

+     def test_get_last_builds_in_stream(self, db_session):

          init_data_contexts(contexts=True)

-         with make_session(conf) as session:

-             builds = ModuleBuild.get_last_builds_in_stream(session, "nginx", "1")

-             builds = [

-                 "%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)

-                 for build in builds

-             ]

-             assert builds == ["nginx:1:3:d5a6c0fa", "nginx:1:3:795e97c1"]

- 

-     def test_get_last_builds_in_stream_version_lte(self):

+         builds = ModuleBuild.get_last_builds_in_stream(db_session, "nginx", "1")

+         builds = [

+             "%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)

+             for build in builds

+         ]

+         db_session.commit()

+         assert builds == ["nginx:1:3:d5a6c0fa", "nginx:1:3:795e97c1"]

+ 

+     def test_get_last_builds_in_stream_version_lte(self, db_session):

          init_data_contexts(1, multiple_stream_versions=True)

-         with make_session(conf) as session:

-             builds = ModuleBuild.get_last_builds_in_stream_version_lte(session, "platform", 290100)

-             builds = set([

-                 "%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)

-                 for build in builds

-             ])

-             assert builds == set(["platform:f29.0.0:3:00000000", "platform:f29.1.0:3:00000000"])

- 

-     def test_get_last_builds_in_stream_version_lte_different_versions(self):

+         builds = ModuleBuild.get_last_builds_in_stream_version_lte(db_session, "platform", 290100)

+         builds = set([

+             "%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)

+             for build in builds

+         ])

+         db_session.commit()

+         assert builds == set(["platform:f29.0.0:3:00000000", "platform:f29.1.0:3:00000000"])

+ 

+     def test_get_last_builds_in_stream_version_lte_different_versions(self, db_session):

          """

          Tests that get_last_builds_in_stream_version_lte works in case the

          name:stream_ver modules have different versions.

          """

          clean_database(False)

  

-         with make_session(conf) as db_session:

-             make_module(

-                 db_session, "platform:f29.1.0:10:old_version", {}, {}, virtual_streams=["f29"])

-             make_module(

-                 db_session, "platform:f29.1.0:15:c11.another", {}, {}, virtual_streams=["f29"])

-             make_module(

-                 db_session, "platform:f29.1.0:15:c11", {}, {}, virtual_streams=["f29"])

-             make_module(

-                 db_session, "platform:f29.2.0:0:old_version", {}, {}, virtual_streams=["f29"])

-             make_module(

-                 db_session, "platform:f29.2.0:1:c11", {}, {}, virtual_streams=["f29"])

-             make_module(

-                 db_session, "platform:f29.3.0:15:old_version", {}, {}, virtual_streams=["f29"])

-             make_module(

-                 db_session, "platform:f29.3.0:20:c11", {}, {}, virtual_streams=["f29"])

- 

-             builds = ModuleBuild.get_last_builds_in_stream_version_lte(

-                 db_session, "platform", 290200)

-             builds = set([

-                 "%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)

-                 for build in builds

-             ])

-             assert builds == set([

-                 "platform:f29.1.0:15:c11",

-                 "platform:f29.1.0:15:c11.another",

-                 "platform:f29.2.0:1:c11",

-             ])

- 

-     def test_get_module_count(self):

+         make_module(

+             db_session, "platform:f29.1.0:10:old_version", {}, {}, virtual_streams=["f29"])

+         make_module(

+             db_session, "platform:f29.1.0:15:c11.another", {}, {}, virtual_streams=["f29"])

+         make_module(

+             db_session, "platform:f29.1.0:15:c11", {}, {}, virtual_streams=["f29"])

+         make_module(

+             db_session, "platform:f29.2.0:0:old_version", {}, {}, virtual_streams=["f29"])

+         make_module(

+             db_session, "platform:f29.2.0:1:c11", {}, {}, virtual_streams=["f29"])

+         make_module(

+             db_session, "platform:f29.3.0:15:old_version", {}, {}, virtual_streams=["f29"])

+         make_module(

+             db_session, "platform:f29.3.0:20:c11", {}, {}, virtual_streams=["f29"])

+ 

+         builds = ModuleBuild.get_last_builds_in_stream_version_lte(

+             db_session, "platform", 290200)

+         builds = set([

+             "%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)

+             for build in builds

+         ])

+         db_session.commit()

+         assert builds == set([

+             "platform:f29.1.0:15:c11",

+             "platform:f29.1.0:15:c11.another",

+             "platform:f29.2.0:1:c11",

+         ])

+ 

+     def test_get_module_count(self, db_session):

          clean_database(False)

-         with make_session(conf) as db_session:

-             make_module(db_session, "platform:f29.1.0:10:c11", {}, {})

-             make_module(db_session, "platform:f29.1.0:10:c12", {}, {})

+         make_module(db_session, "platform:f29.1.0:10:c11", {}, {})

+         make_module(db_session, "platform:f29.1.0:10:c12", {}, {})

  

-             count = ModuleBuild.get_module_count(db_session, name="platform")

-             assert count == 2

+         count = ModuleBuild.get_module_count(db_session, name="platform")

+         db_session.commit()

+         assert count == 2

  

-     def test_add_virtual_streams_filter(self):

+     def test_add_virtual_streams_filter(self, db_session):

          clean_database(False)

  

-         with make_session(conf) as db_session:

-             make_module(db_session, "platform:f29.1.0:10:c1", {}, {}, virtual_streams=["f29"])

-             make_module(db_session, "platform:f29.1.0:15:c1", {}, {}, virtual_streams=["f29"])

-             make_module(

-                 db_session, "platform:f29.3.0:15:old_version", {}, {},

-                 virtual_streams=["f28", "f29"])

-             make_module(db_session, "platform:f29.3.0:20:c11", {}, {}, virtual_streams=["f30"])

- 

-             query = db_session.query(ModuleBuild).filter_by(name="platform")

-             query = ModuleBuild._add_virtual_streams_filter(db_session, query, ["f28", "f29"])

-             count = query.count()

-             assert count == 3

+         make_module(db_session, "platform:f29.1.0:10:c1", {}, {}, virtual_streams=["f29"])

+         make_module(db_session, "platform:f29.1.0:15:c1", {}, {}, virtual_streams=["f29"])

+         make_module(

+             db_session, "platform:f29.3.0:15:old_version", {}, {},

+             virtual_streams=["f28", "f29"])

+         make_module(db_session, "platform:f29.3.0:20:c11", {}, {}, virtual_streams=["f30"])

+ 

+         query = db_session.query(ModuleBuild).filter_by(name="platform")

+         query = ModuleBuild._add_virtual_streams_filter(db_session, query, ["f28", "f29"])

+         count = query.count()

+         db_session.commit()

+         assert count == 3
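
One behavioral detail in the hunks above: ModuleBuild.siblings changes from a property into a method taking the session, so `build_one.siblings` becomes `build_one.siblings(db_session)`. Judging from the assertion that builds 3 and 4 are siblings of build 2, the query it implies is roughly (an assumed sketch, not the actual model code):

    def siblings(self, db_session):
        # Sketch of a ModuleBuild method: ids of the other builds that
        # share this build's name:stream:version triple.
        query = db_session.query(ModuleBuild.id).filter(
            ModuleBuild.name == self.name,
            ModuleBuild.stream == self.stream,
            ModuleBuild.version == self.version,
            ModuleBuild.id != self.id,
        )
        return [i for (i,) in query.all()]
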

file modified
+12 -12
@@ -69,26 +69,26 @@ 

  

  @mock.patch("module_build_service.monitor.builder_failed_counter.labels")

  @mock.patch("module_build_service.monitor.builder_success_counter.inc")

- def test_monitor_state_changing_success(succ_cnt, failed_cnt):

+ def test_monitor_state_changing_success(succ_cnt, failed_cnt, db_session):

      conf = mbs_config.Config(TestConfiguration)

-     with models.make_session(conf) as db_session:

-         b = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

-         b.transition(conf, models.BUILD_STATES["wait"])

-         b.transition(conf, models.BUILD_STATES["build"])

-         b.transition(conf, models.BUILD_STATES["done"])

+     b = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

+     b.transition(db_session, conf, models.BUILD_STATES["wait"])

+     b.transition(db_session, conf, models.BUILD_STATES["build"])

+     b.transition(db_session, conf, models.BUILD_STATES["done"])

+     db_session.commit()

      succ_cnt.assert_called_once()

      failed_cnt.assert_not_called()

  

  

  @mock.patch("module_build_service.monitor.builder_failed_counter.labels")

  @mock.patch("module_build_service.monitor.builder_success_counter.inc")

- def test_monitor_state_changing_failure(succ_cnt, failed_cnt):

+ def test_monitor_state_changing_failure(succ_cnt, failed_cnt, db_session):

      failure_type = "user"

      conf = mbs_config.Config(TestConfiguration)

-     with models.make_session(conf) as db_session:

-         b = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

-         b.transition(conf, models.BUILD_STATES["wait"])

-         b.transition(conf, models.BUILD_STATES["build"])

-         b.transition(conf, models.BUILD_STATES["failed"], failure_type=failure_type)

+     b = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

+     b.transition(db_session, conf, models.BUILD_STATES["wait"])

+     b.transition(db_session, conf, models.BUILD_STATES["build"])

+     b.transition(db_session, conf, models.BUILD_STATES["failed"], failure_type=failure_type)

+     db_session.commit()

      succ_cnt.assert_not_called()

      failed_cnt.assert_called_once_with(reason=failure_type)
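
ModuleBuild.transition likewise gains the session as an explicit first parameter. Condensed from the monitor tests above into one helper (the nvr string is purely illustrative):

    from module_build_service import models
    from tests import make_module

    def demo_transitions(db_session, conf):
        b = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})
        b.transition(db_session, conf, models.BUILD_STATES["wait"])
        b.transition(db_session, conf, models.BUILD_STATES["failed"], failure_type="user")
        db_session.commit()  # transitions are persisted only on commit
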

file modified
+57 -58
@@ -27,60 +27,58 @@ 

  import pytest

  

  import module_build_service.resolver as mbs_resolver

- from module_build_service import app, conf, db, models, utils, Modulemd

+ from module_build_service import models, utils, Modulemd

  from module_build_service.utils import import_mmd, mmd_to_str, load_mmd

  from module_build_service.models import ModuleBuild

  import tests

  

  

- base_dir = os.path.join(os.path.dirname(__file__), "..")

- 

- 

+ @pytest.mark.usefixtures("reuse_component_init_data")

  class TestDBModule:

-     def setup_method(self):

-         tests.reuse_component_init_data()

  

-     def test_get_buildrequired_modulemds(self):

+     def test_get_buildrequired_modulemds(self, db_session):

          mmd = load_mmd(tests.read_staged_data("platform"))

          mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")

-         with models.make_session(conf) as db_session:

-             import_mmd(db_session, mmd)

-             platform_f300103 = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()

-             mmd = tests.make_module(db_session,

-                                     "testmodule:master:20170109091357:123",

-                                     store_to_db=False)

-             build = ModuleBuild(

-                 name="testmodule",

-                 stream="master",

-                 version=20170109091357,

-                 state=5,

-                 build_context="dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3",

-                 runtime_context="ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7",

-                 context="7c29193d",

-                 koji_tag="module-testmodule-master-20170109091357-7c29193d",

-                 scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",

-                 batch=3,

-                 owner="Dr. Pepper",

-                 time_submitted=datetime(2018, 11, 15, 16, 8, 18),

-                 time_modified=datetime(2018, 11, 15, 16, 19, 35),

-                 rebuild_strategy="changed-and-after",

-                 modulemd=mmd_to_str(mmd),

-             )

-             build.buildrequires.append(platform_f300103)

-             db_session.add(build)

-             db_session.commit()

  

-             platform_nsvc = platform_f300103.mmd().get_nsvc()

+         import_mmd(db_session, mmd)

+         platform_f300103 = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()

+         mmd = tests.make_module(db_session,

+                                 "testmodule:master:20170109091357:123",

+                                 store_to_db=False)

+         build = ModuleBuild(

+             name="testmodule",

+             stream="master",

+             version=20170109091357,

+             state=5,

+             build_context="dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3",

+             runtime_context="ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7",

+             context="7c29193d",

+             koji_tag="module-testmodule-master-20170109091357-7c29193d",

+             scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",

+             batch=3,

+             owner="Dr. Pepper",

+             time_submitted=datetime(2018, 11, 15, 16, 8, 18),

+             time_modified=datetime(2018, 11, 15, 16, 19, 35),

+             rebuild_strategy="changed-and-after",

+             modulemd=mmd_to_str(mmd),

+         )

+         build.buildrequires.append(platform_f300103)

+         db_session.add(build)

+         db_session.commit()

+ 

+         platform_nsvc = platform_f300103.mmd().get_nsvc()

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          result = resolver.get_buildrequired_modulemds("testmodule", "master", platform_nsvc)

          nsvcs = set([m.get_nsvc() for m in result])

          assert nsvcs == set(["testmodule:master:20170109091357:123"])

  

      @pytest.mark.parametrize("stream_versions", [False, True])

-     def test_get_compatible_base_module_modulemds_stream_versions(self, stream_versions):

+     def test_get_compatible_base_module_modulemds_stream_versions(

+         self, stream_versions, db_session

+     ):

          tests.init_data(1, multiple_stream_versions=True)

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          result = resolver.get_compatible_base_module_modulemds(

              "platform", "f29.1.0", stream_version_lte=stream_versions, virtual_streams=["f29"],

              states=[models.BUILD_STATES["ready"]])
@@ -109,7 +107,7 @@ 

              mmd.set_xmd(xmd)

              module.modulemd = mmd_to_str(mmd)

              db_session.commit()

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          result = resolver.get_module_build_dependencies(

              "testmodule", "master", "20170109091357", "78e4a6fd").keys()

          assert set(result) == expected
@@ -142,7 +140,7 @@ 

  

          db_session.commit()

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          result = resolver.get_module_build_dependencies(

              "testmodule2", "master", "20180123171545", "c40c156c").keys()

          assert set(result) == set(["module-f28-build"])
@@ -155,25 +153,26 @@ 

          new_callable=PropertyMock,

          return_value=tests.staged_data_filename("local_builds"),

      )

-     def test_get_module_build_dependencies_recursive_requires(self, resultdir, conf_system):

+     def test_get_module_build_dependencies_recursive_requires(

+         self, resultdir, conf_system, db_session

+     ):

          """

          Tests that it returns the requires of the buildrequires recursively

          """

-         with app.app_context():

-             utils.load_local_builds(["platform", "parent", "child", "testmodule"])

+         utils.load_local_builds(db_session, ["platform", "parent", "child", "testmodule"])

  

-             build = models.ModuleBuild.local_modules(db.session, "child", "master")

-             resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

-             result = resolver.get_module_build_dependencies(mmd=build[0].mmd()).keys()

+         build = models.ModuleBuild.local_modules(db_session, "child", "master")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

+         result = resolver.get_module_build_dependencies(mmd=build[0].mmd()).keys()

  

-             local_path = tests.staged_data_filename("local_builds")

+         local_path = tests.staged_data_filename("local_builds")

  

-             expected = [os.path.join(local_path, "module-parent-master-20170816080815/results")]

-             assert set(result) == set(expected)

+         expected = [os.path.join(local_path, "module-parent-master-20170816080815/results")]

+         assert set(result) == set(expected)

  

      def test_resolve_requires(self, db_session):

          build = models.ModuleBuild.get_by_id(db_session, 2)

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          result = resolver.resolve_requires(

              [":".join([build.name, build.stream, build.version, build.context])]

          )
@@ -193,7 +192,7 @@ 

          Tests that the profiles get resolved recursively

          """

          mmd = models.ModuleBuild.get_by_id(db_session, 2).mmd()

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))

          expected = {

              "buildroot": set([
@@ -246,26 +245,26 @@ 

          """

          Test that profiles get resolved recursively on local builds

          """

-         utils.load_local_builds(["platform"])

+         utils.load_local_builds(db_session, ["platform"])

          mmd = models.ModuleBuild.get_by_id(db_session, 2).mmd()

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

          result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))

          expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])}

          assert result == expected

  

-     def test_get_latest_with_virtual_stream(self):

+     def test_get_latest_with_virtual_stream(self, db_session):

          tests.init_data(1, multiple_stream_versions=True)

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          mmd = resolver.get_latest_with_virtual_stream("platform", "f29")

          assert mmd

          assert mmd.get_stream_name() == "f29.2.0"

  

-     def test_get_latest_with_virtual_stream_none(self):

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

+     def test_get_latest_with_virtual_stream_none(self, db_session):

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          mmd = resolver.get_latest_with_virtual_stream("platform", "doesnotexist")

          assert not mmd

  

-     def test_get_module_count(self):

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

+     def test_get_module_count(self, db_session):

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")

          count = resolver.get_module_count(name="platform", stream="f28")

          assert count == 1

@@ -20,27 +20,22 @@ 

  #

  # Written by Jan Kaluza <jkaluza@redhat.com>

  

- import os

+ import pytest

  from datetime import datetime

  

  import module_build_service.resolver as mbs_resolver

- from module_build_service import db

  from module_build_service.utils.general import import_mmd, mmd_to_str, load_mmd

  from module_build_service.models import ModuleBuild

  import tests

  

  

- base_dir = os.path.join(os.path.dirname(__file__), "..")

- 

- 

+ @pytest.mark.usefixtures("reuse_component_init_data")

  class TestLocalResolverModule:

-     def setup_method(self):

-         tests.reuse_component_init_data()

  

-     def test_get_buildrequired_modulemds(self):

+     def test_get_buildrequired_modulemds(self, db_session):

          mmd = load_mmd(tests.read_staged_data("platform"))

          mmd = mmd.copy(mmd.get_module_name(), "f8")

-         import_mmd(db.session, mmd)

+         import_mmd(db_session, mmd)

          platform_f8 = ModuleBuild.query.filter_by(stream="f8").one()

          mmd = mmd.copy("testmodule", "master")

          mmd.set_version(20170109091357)
@@ -62,10 +57,10 @@ 

              rebuild_strategy="changed-and-after",

              modulemd=mmd_to_str(mmd),

          )

-         db.session.add(build)

-         db.session.commit()

+         db_session.add(build)

+         db_session.commit()

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="local")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="local")

          result = resolver.get_buildrequired_modulemds(

              "testmodule", "master", platform_f8.mmd().get_nsvc())

          nsvcs = set([m.get_nsvc() for m in result])
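
Across all three resolver test files the factory call becomes GenericResolver.create(db_session, tests.conf, backend=...): the session is injected at construction time instead of being opened inside resolver methods. A hedged sketch of the dispatch these call sites imply (only the signature is taken from this diff; the registry mechanics are assumed):

    class GenericResolver(object):
        # Assumed registry: backend name -> concrete resolver class,
        # e.g. {"db": ..., "local": ..., "mbs": ...}.
        backends = {}

        @classmethod
        def create(cls, db_session, config, backend="db", **extra):
            if backend not in cls.backends:
                raise ValueError("Unsupported resolver backend: %s" % backend)
            return cls.backends[backend](db_session, config, **extra)
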

file modified
+48 -53
@@ -18,24 +18,18 @@ 

  # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE

  # SOFTWARE.

  

- import os

- 

  from mock import patch, PropertyMock, Mock, call

  

  import module_build_service.resolver as mbs_resolver

  import module_build_service.utils

- from module_build_service import conf, models

  from module_build_service.utils.general import mmd_to_str

  import module_build_service.models

  import tests

  

  

- base_dir = os.path.join(os.path.dirname(__file__), "..")

- 

- 

  class TestMBSModule:

      @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_module_modulemds_nsvc(self, mock_session, testmodule_mmd_9c690d0e):

+     def test_get_module_modulemds_nsvc(self, mock_session, testmodule_mmd_9c690d0e, db_session):

          """ Tests for querying a module from mbs """

          mock_res = Mock()

          mock_res.ok.return_value = True
@@ -54,7 +48,7 @@ 

  

          mock_session.get.return_value = mock_res

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

          module_mmds = resolver.get_module_modulemds(

              "testmodule", "master", "20180205135154", "9c690d0e", virtual_streams=["f28"]

          )
@@ -81,7 +75,7 @@ 

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

      def test_get_module_modulemds_partial(

-         self, mock_session, testmodule_mmd_9c690d0e, testmodule_mmd_c2c572ed

+         self, mock_session, testmodule_mmd_9c690d0e, testmodule_mmd_c2c572ed, db_session

      ):

          """ Test for querying MBS without the context of a module """

  
@@ -110,7 +104,7 @@ 

          }

  

          mock_session.get.return_value = mock_res

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

          ret = resolver.get_module_modulemds("testmodule", "master", version)

          nsvcs = set(

              m.get_nsvc()
@@ -136,7 +130,7 @@ 

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

      def test_get_module_build_dependencies(

-         self, mock_session, platform_mmd, testmodule_mmd_9c690d0e

+         self, mock_session, platform_mmd, testmodule_mmd_9c690d0e, db_session

      ):

          """

          Tests that we return just direct build-time dependencies of testmodule.
@@ -173,7 +167,7 @@ 

  

          mock_session.get.return_value = mock_res

          expected = set(["module-f28-build"])

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

          result = resolver.get_module_build_dependencies(

              "testmodule", "master", "20180205135154", "9c690d0e").keys()

  
@@ -213,7 +207,7 @@ 

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

      def test_get_module_build_dependencies_empty_buildrequires(

-         self, mock_session, testmodule_mmd_9c690d0e

+         self, mock_session, testmodule_mmd_9c690d0e, db_session

      ):

  

          mmd = module_build_service.utils.load_mmd(testmodule_mmd_9c690d0e)
@@ -246,7 +240,7 @@ 

  

          expected = set()

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

          result = resolver.get_module_build_dependencies(

              "testmodule", "master", "20180205135154", "9c690d0e"

          ).keys()
@@ -266,7 +260,9 @@ 

          assert set(result) == expected

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_resolve_profiles(self, mock_session, formatted_testmodule_mmd, platform_mmd):

+     def test_resolve_profiles(

+         self, mock_session, formatted_testmodule_mmd, platform_mmd, db_session

+     ):

  

          mock_res = Mock()

          mock_res.ok.return_value = True
@@ -284,7 +280,7 @@ 

          }

  

          mock_session.get.return_value = mock_res

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

          result = resolver.resolve_profiles(

              formatted_testmodule_mmd, ("buildroot", "srpm-buildroot")

          )
@@ -351,55 +347,54 @@ 

          return_value=tests.staged_data_filename("local_builds")

      )

      def test_resolve_profiles_local_module(

-         self, local_builds, conf_system, formatted_testmodule_mmd

+         self, local_builds, conf_system, formatted_testmodule_mmd, db_session

      ):

          tests.clean_database()

          with tests.app.app_context():

-             module_build_service.utils.load_local_builds(["platform"])

+             module_build_service.utils.load_local_builds(db_session, ["platform"])

  

-             resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+             resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

              result = resolver.resolve_profiles(

                  formatted_testmodule_mmd, ("buildroot", "srpm-buildroot"))

              expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])}

              assert result == expected

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_empty_buildrequired_modulemds(self, mock_session):

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

-         mock_session.get.return_value = Mock(ok=True)

-         mock_session.get.return_value.json.return_value = {"items": [], "meta": {"next": None}}

+     def test_get_empty_buildrequired_modulemds(self, request_session, db_session):

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

+         request_session.get.return_value = Mock(ok=True)

+         request_session.get.return_value.json.return_value = {"items": [], "meta": {"next": None}}

  

          result = resolver.get_buildrequired_modulemds("nodejs", "10", "platform:el8:1:00000000")

          assert [] == result

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_buildrequired_modulemds(self, mock_session):

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+     def test_get_buildrequired_modulemds(self, mock_session, db_session):

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

          mock_session.get.return_value = Mock(ok=True)

-         with models.make_session(conf) as db_session:

-             mock_session.get.return_value.json.return_value = {

-                 "items": [

-                     {

-                         "name": "nodejs",

-                         "stream": "10",

-                         "version": 1,

-                         "context": "c1",

-                         "modulemd": mmd_to_str(

-                             tests.make_module(db_session, "nodejs:10:1:c1", store_to_db=False),

-                         ),

-                     },

-                     {

-                         "name": "nodejs",

-                         "stream": "10",

-                         "version": 2,

-                         "context": "c1",

-                         "modulemd": mmd_to_str(

-                             tests.make_module(db_session, "nodejs:10:2:c1", store_to_db=False),

-                         ),

-                     },

-                 ],

-                 "meta": {"next": None},

-             }

+         mock_session.get.return_value.json.return_value = {

+             "items": [

+                 {

+                     "name": "nodejs",

+                     "stream": "10",

+                     "version": 1,

+                     "context": "c1",

+                     "modulemd": mmd_to_str(

+                         tests.make_module(db_session, "nodejs:10:1:c1", store_to_db=False),

+                     ),

+                 },

+                 {

+                     "name": "nodejs",

+                     "stream": "10",

+                     "version": 2,

+                     "context": "c1",

+                     "modulemd": mmd_to_str(

+                         tests.make_module(db_session, "nodejs:10:2:c1", store_to_db=False),

+                     ),

+                 },

+             ],

+             "meta": {"next": None},

+         }

  

          result = resolver.get_buildrequired_modulemds("nodejs", "10", "platform:el8:1:00000000")

  
@@ -411,7 +406,7 @@ 

          assert "c1" == mmd.get_context()

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_module_count(self, mock_session):

+     def test_get_module_count(self, mock_session, db_session):

          mock_res = Mock()

          mock_res.ok.return_value = True

          mock_res.json.return_value = {
@@ -420,7 +415,7 @@ 

          }

          mock_session.get.return_value = mock_res

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

          count = resolver.get_module_count(name="platform", stream="f28")

  

          assert count == 5
@@ -430,7 +425,7 @@ 

          )

  

      @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_latest_with_virtual_stream(self, mock_session, platform_mmd):

+     def test_get_latest_with_virtual_stream(self, mock_session, platform_mmd, db_session):

          mock_res = Mock()

          mock_res.ok.return_value = True

          mock_res.json.return_value = {
@@ -447,7 +442,7 @@ 

          }

          mock_session.get.return_value = mock_res

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+         resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

          mmd = resolver.get_latest_with_virtual_stream("platform", "virtualf28")

  

          assert mmd.get_module_name() == "platform"

@@ -25,7 +25,7 @@ 

  from mock import call, patch, Mock

  from sqlalchemy import func

  

- from module_build_service import conf, db

+ from module_build_service import conf

  from module_build_service.models import BUILD_STATES, ModuleBuild

  from module_build_service.scheduler.consumer import MBSConsumer

  from module_build_service.scheduler.handlers.greenwave import get_corresponding_module_build
@@ -40,10 +40,10 @@ 

          clean_database()

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_module_build_nvr_does_not_exist_in_koji(self, ClientSession):

+     def test_module_build_nvr_does_not_exist_in_koji(self, ClientSession, db_session):

          ClientSession.return_value.getBuild.return_value = None

  

-         assert get_corresponding_module_build(db.session, "n-v-r") is None

+         assert get_corresponding_module_build(db_session, "n-v-r") is None

  

      @pytest.mark.parametrize(

          "build_info",
@@ -57,32 +57,34 @@ 

          ],

      )

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_cannot_find_module_build_id_from_build_info(self, ClientSession, build_info):

+     def test_cannot_find_module_build_id_from_build_info(

+         self, ClientSession, build_info, db_session

+     ):

          ClientSession.return_value.getBuild.return_value = build_info

  

-         assert get_corresponding_module_build(db.session, "n-v-r") is None

+         assert get_corresponding_module_build(db_session, "n-v-r") is None

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_corresponding_module_build_id_does_not_exist_in_db(self, ClientSession):

-         fake_module_build_id, = db.session.query(func.max(ModuleBuild.id)).first()

+     def test_corresponding_module_build_id_does_not_exist_in_db(self, ClientSession, db_session):

+         fake_module_build_id, = db_session.query(func.max(ModuleBuild.id)).first()

  

          ClientSession.return_value.getBuild.return_value = {

              "extra": {"typeinfo": {"module": {"module_build_service_id": fake_module_build_id + 1}}}

          }

  

-         assert get_corresponding_module_build(db.session, "n-v-r") is None

+         assert get_corresponding_module_build(db_session, "n-v-r") is None

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_find_the_module_build(self, ClientSession):

+     def test_find_the_module_build(self, ClientSession, db_session):

          expected_module_build = (

-             db.session.query(ModuleBuild).filter(ModuleBuild.name == "platform").first()

+             db_session.query(ModuleBuild).filter(ModuleBuild.name == "platform").first()

          )

  

          ClientSession.return_value.getBuild.return_value = {

              "extra": {"typeinfo": {"module": {"module_build_service_id": expected_module_build.id}}}

          }

  

-         build = get_corresponding_module_build(db.session, "n-v-r")

+         build = get_corresponding_module_build(db_session, "n-v-r")

  

          assert expected_module_build.id == build.id

          assert expected_module_build.name == build.name
@@ -92,9 +94,9 @@ 

      """Test handler decision_update"""

  

      @patch("module_build_service.scheduler.handlers.greenwave.log")

-     def test_decision_context_is_not_match(self, log):

+     def test_decision_context_is_not_match(self, log, db_session):

          msg = Mock(msg_id="msg-id-1", decision_context="bodhi_update_push_testing")

-         decision_update(conf, db.session, msg)

+         decision_update(conf, db_session, msg)

          log.debug.assert_called_once_with(

              'Skip Greenwave message %s as MBS only handles messages with the decision context "%s"',

              "msg-id-1",
@@ -102,14 +104,14 @@ 

          )

  

      @patch("module_build_service.scheduler.handlers.greenwave.log")

-     def test_not_satisfy_policies(self, log):

+     def test_not_satisfy_policies(self, log, db_session):

          msg = Mock(

              msg_id="msg-id-1",

              decision_context="test_dec_context",

              policies_satisfied=False,

              subject_identifier="pkg-0.1-1.c1",

          )

-         decision_update(conf, db.session, msg)

+         decision_update(conf, db_session, msg)

          log.debug.assert_called_once_with(

              "Skip to handle module build %s because it has not satisfied Greenwave policies.",

              msg.subject_identifier,
@@ -117,21 +119,21 @@ 

  

      @patch("module_build_service.messaging.publish")

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_transform_from_done_to_ready(self, ClientSession, publish):

+     def test_transform_from_done_to_ready(self, ClientSession, publish, db_session):

          clean_database()

  

          # This build should be queried and transformed to ready state

-         module_build = make_module(db.session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

+         module_build = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

          module_build.transition(

-             conf, BUILD_STATES["done"], "Move to done directly for running test."

+             db_session, conf, BUILD_STATES["done"], "Move to done directly for running test."

          )

-         db.session.commit()

+         db_session.commit()

  

          # Assert this call below

          first_publish_call = call(

              service="mbs",

              topic="module.state.change",

-             msg=module_build.json(show_tasks=False),

+             msg=module_build.json(db_session, show_tasks=False),

              conf=conf,

          )

  
@@ -153,9 +155,7 @@ 

          consumer.consume(msg)

  

          # Load module build again to check its state is moved correctly

-         module_build = (

-             db.session.query(ModuleBuild).filter(ModuleBuild.id == module_build.id).first())

- 

+         db_session.refresh(module_build)

          assert BUILD_STATES["ready"] == module_build.state

  

          publish.assert_has_calls([
@@ -163,7 +163,7 @@ 

              call(

                  service="mbs",

                  topic="module.state.change",

-                 msg=module_build.json(show_tasks=False),

+                 msg=module_build.json(db_session, show_tasks=False),

                  conf=conf,

              ),

          ])
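
A small cleanup in the Greenwave test above: rather than re-querying the build by id after the consumer runs, the test calls db_session.refresh(module_build), which re-reads the row's current column values into the instance already held by the session:

    # Before: fetch the row again to observe the state change.
    module_build = (
        db_session.query(ModuleBuild).filter(ModuleBuild.id == module_build.id).first())

    # After: re-read the columns of the instance we already hold.
    db_session.refresh(module_build)
    assert module_build.state == BUILD_STATES["ready"]
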

@@ -28,7 +28,7 @@ 

  import module_build_service.messaging

  import module_build_service.scheduler.handlers.modules

  from module_build_service import build_logs

- from module_build_service.models import make_session, ModuleBuild, ComponentBuild

+ from module_build_service.models import make_db_session, ModuleBuild

  from module_build_service.utils.general import mmd_to_str, load_mmd

  

  
@@ -41,13 +41,14 @@ 

          mmd = mmd.copy("testmodule", "1")

          scmurl = "git://pkgs.domain.local/modules/testmodule?#620ec77"

          clean_database()

-         with make_session(conf) as session:

+         with make_db_session(conf) as session:

              ModuleBuild.create(

                  session, conf, "testmodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl")

  

      def teardown_method(self, test_method):

          try:

-             path = build_logs.path(1)

+             with make_db_session(conf) as db_session:

+                 path = build_logs.path(db_session, 1)

              os.remove(path)

          except Exception:

              pass
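
build_logs.path now takes a session even for a bare build id, presumably so it can resolve the id to a ModuleBuild row. Since teardown_method cannot receive pytest fixtures, the teardown above opens its own short-lived session; the same pattern works anywhere no fixture is available:

# Sketch of the one-off session pattern used in the teardown above.
with make_db_session(conf) as db_session:
    log_path = build_logs.path(db_session, 1)  # 1 is a module build id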
@@ -90,7 +91,7 @@ 

              msg_id=None, module_build_id=2, module_build_state="init"

          )

  

-         self.fn(config=conf, session=db_session, msg=msg)

+         self.fn(config=conf, db_session=db_session, msg=msg)

  

          build = ModuleBuild.get_by_id(db_session, 2)

          # Make sure the module entered the wait state
@@ -122,18 +123,18 @@ 

  

      @patch("module_build_service.scm.SCM")

      @patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])

-     def test_init_scm_not_available(self, get_build_arches, mocked_scm):

-         def mocked_scm_get_latest():

-             raise RuntimeError("Failed in mocked_scm_get_latest")

- 

+     def test_init_scm_not_available(self, get_build_arches, mocked_scm, db_session):

          FakeSCM(

-             mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")

-         mocked_scm.return_value.get_latest = mocked_scm_get_latest

+             mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4",

+             get_latest_raise=True,

+             get_latest_error=RuntimeError("Failed in mocked_scm_get_latest")

+         )

+ 

          msg = module_build_service.messaging.MBSModule(

              msg_id=None, module_build_id=2, module_build_state="init")

-         with make_session(conf) as session:

-             self.fn(config=conf, session=session, msg=msg)

-         build = ModuleBuild.query.filter_by(id=2).one()

+         self.fn(config=conf, db_session=db_session, msg=msg)

+ 

+         build = ModuleBuild.get_by_id(db_session, 2)

          # Make sure the module entered the failed state

          # since the git server is not available

          assert build.state == 4, build.state
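
The manual override of mocked_scm.return_value.get_latest is folded into FakeSCM via the new get_latest_raise and get_latest_error keyword arguments. Their implementation is not part of these hunks; a plausible sketch, assuming FakeSCM configures the SCM mock it is handed:

# tests/__init__.py (hypothetical sketch of the new keyword arguments)
class FakeSCM(object):
    def __init__(self, mocked_scm, name, mmd_filenames, commit=None,
                 get_latest_raise=False, get_latest_error=None):
        # ... existing setup of the mocked checkout, name and commit ...
        if get_latest_raise:
            # Make every get_latest() call fail with the supplied error.
            mocked_scm.return_value.get_latest.side_effect = (
                get_latest_error or RuntimeError("Failed to get the latest commit"))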
@@ -145,24 +146,25 @@ 

      )

      @patch("module_build_service.scm.SCM")

      @patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])

-     def test_init_includedmodule(self, get_build_arches, mocked_scm, mocked_mod_allow_repo):

+     def test_init_includedmodule(

+         self, get_build_arches, mocked_scm, mocked_mod_allow_repo, db_session

+     ):

          FakeSCM(mocked_scm, "includedmodules", ["testmodule_init.yaml"])

          includedmodules_yml_path = read_staged_data("includedmodules")

          mmd = load_mmd(includedmodules_yml_path)

          # Set the name and stream

          mmd = mmd.copy("includedmodules", "1")

          scmurl = "git://pkgs.domain.local/modules/includedmodule?#da95886"

-         with make_session(conf) as session:

-             ModuleBuild.create(

-                 session, conf, "includemodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl")

-             msg = module_build_service.messaging.MBSModule(

-                 msg_id=None, module_build_id=3, module_build_state="init")

-             self.fn(config=conf, session=session, msg=msg)

-         build = ModuleBuild.query.filter_by(id=3).one()

+         ModuleBuild.create(

+             db_session, conf, "includemodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl")

+         msg = module_build_service.messaging.MBSModule(

+             msg_id=None, module_build_id=3, module_build_state="init")

+         self.fn(config=conf, db_session=db_session, msg=msg)

+         build = ModuleBuild.get_by_id(db_session, 3)

          assert build.state == 1

          assert build.name == "includemodule"

          batches = {}

-         for comp_build in ComponentBuild.query.filter_by(module_id=3).all():

+         for comp_build in build.component_builds:

              batches[comp_build.package] = comp_build.batch

          assert batches["perl-List-Compare"] == 2

          assert batches["perl-Tangerine"] == 2
@@ -183,7 +185,7 @@ 

      @patch("module_build_service.scm.SCM")

      @patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])

      def test_init_when_get_latest_raises(

-             self, get_build_arches, mocked_scm, mocked_from_module_event):

+             self, get_build_arches, mocked_scm, mocked_from_module_event, db_session):

          FakeSCM(

              mocked_scm,

              "testmodule",
@@ -193,12 +195,13 @@ 

          )

          msg = module_build_service.messaging.MBSModule(

              msg_id=None, module_build_id=2, module_build_state="init")

-         with make_session(conf) as session:

-             build = session.query(ModuleBuild).filter_by(id=2).one()

-             mocked_from_module_event.return_value = build

-             self.fn(config=conf, session=session, msg=msg)

-             # Query the database again to make sure the build object is updated

-             session.refresh(build)

-             # Make sure the module entered the failed state

-             assert build.state == 4, build.state

-             assert "Failed to get the latest commit for" in build.state_reason

+         build = ModuleBuild.get_by_id(db_session, 2)

+         mocked_from_module_event.return_value = build

+ 

+         self.fn(config=conf, db_session=db_session, msg=msg)

+ 

+         # Query the database again to make sure the build object is updated

+         db_session.refresh(build)

+         # Make sure the module entered the failed state

+         assert build.state == 4, build.state

+         assert "Failed to get the latest commit for" in build.state_reason
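
A pattern repeated throughout these tests: instead of discarding the instance and querying it again by id, the test refreshes it on the shared session, which reloads its attributes from the database:

# Before: a second query for the same row
build = db_session.query(ModuleBuild).filter(ModuleBuild.id == 2).first()

# After: reload the instance already held by the test
db_session.refresh(build)

Because the test and the handler under test now share one session, refresh() is enough to observe the handler's committed changes.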

@@ -27,7 +27,7 @@ 

  import os

  import koji

  import pytest

- from tests import conf, db, scheduler_init_data, read_staged_data

+ from tests import conf, scheduler_init_data, read_staged_data

  import module_build_service.resolver

  from module_build_service import build_logs, Modulemd

  from module_build_service.utils.general import load_mmd
@@ -44,7 +44,8 @@ 

  

      def teardown_method(self, test_method):

          try:

-             path = build_logs.path(1)

+             with module_build_service.models.make_db_session(conf) as db_session:

+                 path = build_logs.path(db_session, 1)

              os.remove(path)

          except Exception:

              pass
@@ -79,8 +80,8 @@ 

  

          msg = module_build_service.messaging.MBSModule(

              msg_id=None, module_build_id=1, module_build_state="some state")

-         with patch.object(module_build_service.resolver, "system_resolver"):

-             self.fn(config=self.config, session=self.session, msg=msg)

+         with patch("module_build_service.resolver.GenericResolver.create"):

+             self.fn(config=self.config, db_session=self.session, msg=msg)

  

      @patch(

          "module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -115,20 +116,19 @@ 

          resolver.backend = "db"

          resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

  

-         with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-             msg = module_build_service.messaging.MBSModule(

-                 msg_id=None, module_build_id=2, module_build_state="some state")

-             module_build_service.scheduler.handlers.modules.wait(

-                 config=conf, session=db_session, msg=msg)

-             koji_session.newRepo.assert_called_once_with("module-123-build")

+         generic_resolver.create.return_value = resolver

+         msg = module_build_service.messaging.MBSModule(

+             msg_id=None, module_build_id=2, module_build_state="some state")

+ 

+         module_build_service.scheduler.handlers.modules.wait(

+             config=conf, db_session=db_session, msg=msg)

+ 

+         koji_session.newRepo.assert_called_once_with("module-123-build")

  

          # When module-build-macros is reused, it still has to appear only

          # once in the database.

-         builds_count = (

-             db.session.query(ComponentBuild)

-             .filter_by(package="module-build-macros", module_id=2)

-             .count()

-         )

+         builds_count = db_session.query(ComponentBuild).filter_by(

+             package="module-build-macros", module_id=2).count()

          assert builds_count == 1

  

      @patch(
@@ -164,12 +164,14 @@ 

          resolver.backend = "db"

          resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

  

-         with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-             msg = module_build_service.messaging.MBSModule(

-                 msg_id=None, module_build_id=2, module_build_state="some state")

-             module_build_service.scheduler.handlers.modules.wait(

-                 config=conf, session=db_session, msg=msg)

-             assert koji_session.newRepo.called

+         generic_resolver.create.return_value = resolver

+         msg = module_build_service.messaging.MBSModule(

+             msg_id=None, module_build_id=2, module_build_state="some state")

+ 

+         module_build_service.scheduler.handlers.modules.wait(

+             config=conf, db_session=db_session, msg=msg)

+ 

+         assert koji_session.newRepo.called

  

      @patch(

          "module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -209,13 +211,15 @@ 

              "module-bootstrap-tag": [base_mmd]

          }

  

-         with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-             msg = module_build_service.messaging.MBSModule(

-                 msg_id=None, module_build_id=2, module_build_state="some state")

-             module_build_service.scheduler.handlers.modules.wait(

-                 config=conf, session=db_session, msg=msg)

-             module_build = ModuleBuild.query.filter_by(id=2).one()

-             assert module_build.cg_build_koji_tag == "modular-updates-candidate"

+         generic_resolver.create.return_value = resolver

+         msg = module_build_service.messaging.MBSModule(

+             msg_id=None, module_build_id=2, module_build_state="some state")

+ 

+         module_build_service.scheduler.handlers.modules.wait(

+             config=conf, db_session=db_session, msg=msg)

+ 

+         module_build = ModuleBuild.get_by_id(db_session, 2)

+         assert module_build.cg_build_koji_tag == "modular-updates-candidate"

  

      @pytest.mark.parametrize(

          "koji_cg_tag_build,expected_cg_koji_build_tag",
@@ -280,12 +284,12 @@ 

              "koji_cg_tag_build",

              new=koji_cg_tag_build,

          ):

-             with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-                 msg = module_build_service.messaging.MBSModule(

-                     msg_id=None, module_build_id=2, module_build_state="some state"

-                 )

-                 module_build_service.scheduler.handlers.modules.wait(

-                     config=conf, session=db_session, msg=msg

-                 )

-                 module_build = ModuleBuild.query.filter_by(id=2).one()

-                 assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag

+             generic_resolver.create.return_value = resolver

+             msg = module_build_service.messaging.MBSModule(

+                 msg_id=None, module_build_id=2, module_build_state="some state"

+             )

+             module_build_service.scheduler.handlers.modules.wait(

+                 config=conf, db_session=db_session, msg=msg

+             )

+             module_build = ModuleBuild.get_by_id(db_session, 2)

+             assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag
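
These tests also stop monkeypatching the module-level system_resolver attribute and stub GenericResolver.create instead. The generic_resolver name is evidently a mock injected by a class-level patch decorator outside the visible hunks; assuming that wiring, the pattern is:

import mock
from mock import patch

# Assumed decorator (not visible in these hunks); patching the class makes
# every test method receive the mock as the generic_resolver argument.
@patch("module_build_service.resolver.GenericResolver")
class TestModuleWait(object):  # class name illustrative
    def test_wait_handler(self, generic_resolver):
        resolver = mock.Mock()
        # The wait handler obtains its resolver via GenericResolver.create(),
        # so stubbing create() replaces the old attribute patching.
        generic_resolver.create.return_value = resolver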

@@ -22,7 +22,7 @@ 

  import pytest

  from mock import patch

  from module_build_service import models, conf

- from tests import reuse_component_init_data, db, clean_database

+ from tests import clean_database

  import mock

  import koji

  from module_build_service.scheduler.producer import MBSProducer
@@ -31,6 +31,7 @@ 

  from datetime import datetime, timedelta

  

  

+ @pytest.mark.usefixtures("reuse_component_init_data")

  @patch(

      "module_build_service.builder.GenericBuilder.default_buildroot_groups",

      return_value={"build": [], "srpm-build": []},
@@ -39,8 +40,6 @@ 

  @patch("module_build_service.builder.GenericBuilder.create_from_module")

  class TestPoller:

      def setup_method(self, test_method):

-         reuse_component_init_data()

- 

          self.p_read_config = patch(

              "koji.read_config",

              return_value={
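
reuse_component_init_data changes from a helper called in setup_method to a pytest fixture applied with the class-level usefixtures marker, so the database seeding goes through the same machinery as the db_session fixture. The fixture body is not shown in these hunks; a minimal sketch, assuming the old helper's body simply moves into it:

# conftest.py (hypothetical; the real fixture may seed the data differently)
import pytest

@pytest.fixture()
def reuse_component_init_data():
    from tests import clean_database
    clean_database()
    _seed_reusable_component_builds()  # hypothetical seeding helper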
@@ -58,7 +57,7 @@ 

      @pytest.mark.parametrize("fresh", [True, False])

      @patch("module_build_service.utils.batches.start_build_component")

      def test_process_paused_module_builds(

-         self, start_build_component, create_builder, global_consumer, dbg, fresh

+         self, start_build_component, create_builder, global_consumer, dbg, fresh, db_session

      ):

          """

          Tests the general use case of process_paused_module_builds.
@@ -72,14 +71,14 @@ 

  

          # Change the batch to 2, so the module build is in a state where

          # it is not building anything, but the state is "build".

-         module_build = models.ModuleBuild.query.get(3)

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 2

          # If fresh is set, then we simulate that activity just occurred 2 minutes ago on the build

          if fresh:

              module_build.time_modified = datetime.utcnow() - timedelta(minutes=2)

          else:

              module_build.time_modified = datetime.utcnow() - timedelta(days=5)

-         db.session.commit()

+         db_session.commit()

  

          # Poll :)

          hub = mock.MagicMock()
@@ -87,7 +86,7 @@ 

          poller.poll()

  

          # Refresh our module_build object.

-         module_build = models.ModuleBuild.query.get(3)

+         db_session.refresh(module_build)

  

          # If fresh is set, we expect the poller to not touch the module build since it's been less

          # than 10 minutes of inactivity
@@ -112,7 +111,7 @@ 

      @patch("module_build_service.utils.batches.start_build_component")

      def test_process_paused_module_builds_with_new_repo_task(

          self, start_build_component, create_builder, global_consumer, dbg, task_state,

-         expect_start_build_component

+         expect_start_build_component, db_session

      ):

          """

          Tests the general use case of process_paused_module_builds.
@@ -126,7 +125,7 @@ 

  

          # Change the batch to 2, so the module build is in a state where

          # it is not building anything, but the state is "build".

-         module_build = models.ModuleBuild.query.get(3)

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 2

          module_build.time_modified = datetime.utcnow() - timedelta(days=5)

          if task_state:
@@ -134,7 +133,7 @@ 

              koji_session.getTaskInfo.return_value = {"state": task_state}

              builder.koji_session = koji_session

              module_build.new_repo_task_id = 123

-         db.session.commit()

+         db_session.commit()

  

          # Poll :)

          hub = mock.MagicMock()
@@ -142,7 +141,7 @@ 

          poller.poll()

  

          # Refresh our module_build object.

-         module_build = models.ModuleBuild.query.get(3)

+         db_session.refresh(module_build)

  

          if expect_start_build_component:

              expected_state = koji.BUILD_STATES["BUILDING"]
@@ -160,7 +159,7 @@ 

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_retrigger_new_repo_on_failure(

-         self, ClientSession, create_builder, global_consumer, dbg

+         self, ClientSession, create_builder, global_consumer, dbg, db_session

      ):

          """

          Tests that we call koji_session.newRepo when the newRepo task failed.
@@ -180,10 +179,10 @@ 

  

          # Change the batch to 2, so the module build is in a state where

          # it is not building anything, but the state is "build".

-         module_build = models.ModuleBuild.query.get(3)

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 2

          module_build.new_repo_task_id = 123456

-         db.session.commit()

+         db_session.commit()

  

          hub = mock.MagicMock()

          poller = MBSProducer(hub)
@@ -195,7 +194,7 @@ 

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_trigger_new_repo_when_succeeded(

-         self, ClientSession, create_builder, global_consumer, dbg

+         self, ClientSession, create_builder, global_consumer, dbg, db_session

      ):

          """

          Tests that we do not call koji_session.newRepo when the newRepo task
@@ -216,23 +215,23 @@ 

  

          # Change the batch to 2, so the module build is in a state where

          # it is not building anything, but the state is "build".

-         module_build = models.ModuleBuild.query.get(3)

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 2

          module_build.new_repo_task_id = 123456

-         db.session.commit()

+         db_session.commit()

  

          hub = mock.MagicMock()

          poller = MBSProducer(hub)

          poller.poll()

  

          # Refresh our module_build object.

-         module_build = models.ModuleBuild.query.get(3)

+         db_session.refresh(module_build)

  

          assert not koji_session.newRepo.called

          assert module_build.new_repo_task_id == 123456

  

      def test_process_paused_module_builds_waiting_for_repo(

-         self, create_builder, global_consumer, dbg

+         self, create_builder, global_consumer, dbg, db_session

      ):

          """

          Tests that process_paused_module_builds does not start a new batch
@@ -247,10 +246,10 @@ 

  

          # Change the batch to 2, so the module build is in a state where

          # it is not building anything, but the state is "build".

-         module_build = models.ModuleBuild.query.get(3)

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 2

          module_build.new_repo_task_id = 123456

-         db.session.commit()

+         db_session.commit()

  

          # Poll :)

          hub = mock.MagicMock()
@@ -258,7 +257,7 @@ 

          poller.poll()

  

          # Refresh our module_build object.

-         module_build = models.ModuleBuild.query.get(3)

+         db_session.refresh(module_build)

  

          # Components should not be in building state

          components = module_build.current_batch()
@@ -268,7 +267,7 @@ 

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_old_build_targets_are_not_associated_with_any_module_builds(

-         self, ClientSession, create_builder, global_consumer, dbg

+         self, ClientSession, create_builder, global_consumer, dbg, db_session

      ):

          consumer = mock.MagicMock()

          consumer.incoming = queue.Queue()
@@ -283,16 +282,16 @@ 

  

          hub = mock.MagicMock()

          poller = MBSProducer(hub)

-         poller.delete_old_koji_targets(conf, db.session)

+         poller.delete_old_koji_targets(conf, db_session)

  

          koji_session.deleteBuildTarget.assert_not_called()

  

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_dont_delete_base_module_build_target(

-         self, ClientSession, create_builder, global_consumer, dbg

+         self, ClientSession, create_builder, global_consumer, dbg, db_session

      ):

-         module_build = models.ModuleBuild.query.get(3)

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

  

          koji_session = ClientSession.return_value

          # No created module build has any of these tags.
@@ -308,16 +307,16 @@ 

  

              hub = mock.MagicMock()

              poller = MBSProducer(hub)

-             poller.delete_old_koji_targets(conf, db.session)

+             poller.delete_old_koji_targets(conf, db_session)

  

              koji_session.deleteBuildTarget.assert_not_called()

  

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_dont_delete_build_target_for_unfinished_module_builds(

-         self, ClientSession, create_builder, global_consumer, dbg

+         self, ClientSession, create_builder, global_consumer, dbg, db_session

      ):

-         module_build = models.ModuleBuild.query.get(3)

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

  

          koji_session = ClientSession.return_value

          # No created module build has any of these tags.
@@ -331,31 +330,32 @@ 

          # should not be deleted.

          for state in ["init", "wait", "build"]:

              module_build.state = state

-             db.session.commit()

+             db_session.commit()

  

              hub = mock.MagicMock()

              poller = MBSProducer(hub)

-             poller.delete_old_koji_targets(conf, db.session)

+             poller.delete_old_koji_targets(conf, db_session)

  

              koji_session.deleteBuildTarget.assert_not_called()

  

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_only_delete_build_target_with_allowed_koji_tag_prefix(

-         self, ClientSession, create_builder, global_consumer, dbg

+         self, ClientSession, create_builder, global_consumer, dbg, db_session

      ):

-         module_build_2 = models.ModuleBuild.query.filter_by(id=2).one()

-         module_build_3 = models.ModuleBuild.query.filter_by(id=3).one()

- 

+         module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)

          # Only module build 2's build target (target ID 1) should be deleted.

          module_build_2.koji_tag = "module-tag1"

          module_build_2.state = models.BUILD_STATES["done"]

          # Make time_completed old enough to easily exceed koji_target_delete_time

          module_build_2.time_completed = datetime.utcnow() - timedelta(hours=24)

+ 

+         module_build_3 = models.ModuleBuild.get_by_id(db_session, 3)

          module_build_3.koji_tag = "f28"

-         db.session.commit()

-         db.session.refresh(module_build_2)

-         db.session.refresh(module_build_3)

+ 

+         db_session.commit()

+         db_session.refresh(module_build_2)

+         db_session.refresh(module_build_3)

  

          koji_session = ClientSession.return_value

          # No created module build has any of these tags.
@@ -372,7 +372,7 @@ 

              with patch.object(conf, "koji_target_delete_time", new=60):

                  hub = mock.MagicMock()

                  poller = MBSProducer(hub)

-                 poller.delete_old_koji_targets(conf, db.session)

+                 poller.delete_old_koji_targets(conf, db_session)

  

              koji_session.deleteBuildTarget.assert_called_once_with(1)

              koji_session.krb_login.assert_called_once()
@@ -380,17 +380,17 @@ 

      @patch.dict("sys.modules", krbV=mock.MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_cant_delete_build_target_if_not_reach_delete_time(

-         self, ClientSession, create_builder, global_consumer, dbg

+         self, ClientSession, create_builder, global_consumer, dbg, db_session

      ):

-         module_build_2 = models.ModuleBuild.query.filter_by(id=2).one()

- 

+         module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)

          # This build target must not be deleted, because the delete time has not been reached.

          module_build_2.koji_tag = "module-tag1"

          module_build_2.state = models.BUILD_STATES["done"]

          # time_completed is only 5 minutes ago, so koji_target_delete_time is not exceeded

          module_build_2.time_completed = datetime.utcnow() - timedelta(minutes=5)

-         db.session.commit()

-         db.session.refresh(module_build_2)

+ 

+         db_session.commit()

+         db_session.refresh(module_build_2)

  

          koji_session = ClientSession.return_value

          # No created module build has any of these tags.
@@ -407,12 +407,14 @@ 

              # enough for test.

              hub = mock.MagicMock()

              poller = MBSProducer(hub)

-             poller.delete_old_koji_targets(conf, db.session)

+             poller.delete_old_koji_targets(conf, db_session)

  

              koji_session.deleteBuildTarget.assert_not_called()

  

      @pytest.mark.parametrize("state", ["init", "wait"])

-     def test_process_waiting_module_build(self, create_builder, global_consumer, dbg, state):

+     def test_process_waiting_module_build(

+         self, create_builder, global_consumer, dbg, state, db_session

+     ):

          """ Test that processing old waiting module builds works. """

  

          consumer = mock.MagicMock()
@@ -424,27 +426,29 @@ 

  

          # Change the batch to 2, so the module build is in a state where

          # it is not building anything, but the state is "build".

-         module_build = models.ModuleBuild.query.get(3)

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.state = models.BUILD_STATES[state]

          original = datetime.utcnow() - timedelta(minutes=11)

          module_build.time_modified = original

-         db.session.commit()

-         db.session.refresh(module_build)

+ 

+         db_session.commit()

+         db_session.refresh(module_build)

  

          # Ensure the queue is empty before we start.

          assert consumer.incoming.qsize() == 0

  

          # Poll :)

-         poller.process_waiting_module_builds(db.session)

+         poller.process_waiting_module_builds(db_session)

  

          assert consumer.incoming.qsize() == 1

-         module_build = models.ModuleBuild.query.get(3)

+ 

+         db_session.refresh(module_build)

          # ensure the time_modified was changed.

          assert module_build.time_modified > original

  

      @pytest.mark.parametrize("state", ["init", "wait"])

      def test_process_waiting_module_build_not_old_enough(

-         self, create_builder, global_consumer, dbg, state

+         self, create_builder, global_consumer, dbg, state, db_session

      ):

          """ Test that we do not process young waiting builds. """

  
@@ -457,23 +461,26 @@ 

  

          # Change the batch to 2, so the module build is in a state where

          # it is not building anything, but the state is "build".

-         module_build = models.ModuleBuild.query.get(3)

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.state = models.BUILD_STATES[state]

          original = datetime.utcnow() - timedelta(minutes=9)

          module_build.time_modified = original

-         db.session.commit()

-         db.session.refresh(module_build)

+ 

+         db_session.commit()

+         db_session.refresh(module_build)

  

          # Ensure the queue is empty before we start.

          assert consumer.incoming.qsize() == 0

  

          # Poll :)

-         poller.process_waiting_module_builds(db.session)

+         poller.process_waiting_module_builds(db_session)

  

          # Ensure we did *not* process the 9-minute-old build.

          assert consumer.incoming.qsize() == 0

  

-     def test_process_waiting_module_build_none_found(self, create_builder, global_consumer, dbg):

+     def test_process_waiting_module_build_none_found(

+         self, create_builder, global_consumer, dbg, db_session

+     ):

          """ Test nothing happens when no module builds are waiting. """

  

          consumer = mock.MagicMock()
@@ -487,7 +494,7 @@ 

          assert consumer.incoming.qsize() == 0

  

          # Poll :)

-         poller.process_waiting_module_builds(db.session)

+         poller.process_waiting_module_builds(db_session)

  

          # Ensure we did *not* process any of the non-waiting builds.

          assert consumer.incoming.qsize() == 0
@@ -634,21 +641,23 @@ 

      @pytest.mark.parametrize("btime", (True, False))

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_sync_koji_build_tags(

-         self, ClientSession, create_builder, global_consumer, dbg, tagged, tagged_in_final, btime

+         self, ClientSession, create_builder, global_consumer, dbg, tagged, tagged_in_final, btime,

+         db_session

      ):

-         module_build_2 = models.ModuleBuild.query.filter_by(id=2).one()

- 

+         module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)

          # Put module build 2 into the build state so its Koji build tags get synced.

          module_build_2.koji_tag = "module-tag1"

          module_build_2.state = models.BUILD_STATES["build"]

          if btime:

              module_build_2.time_modified = datetime.utcnow() - timedelta(minutes=12)

+ 

          c = module_build_2.current_batch()[0]

          c.state = koji.BUILD_STATES["COMPLETE"]

          c.tagged_in_final = False

          c.tagged = False

-         db.session.commit()

-         db.session.refresh(module_build_2)

+ 

+         db_session.commit()

+         db_session.refresh(module_build_2)

  

          koji_session = ClientSession.return_value

          # No created module build has any of these tags.
@@ -668,7 +677,9 @@ 

          poller = MBSProducer(hub)

  

          assert consumer.incoming.qsize() == 0

-         poller.sync_koji_build_tags(conf, db.session)

+ 

+         poller.sync_koji_build_tags(conf, db_session)

+ 

          assert consumer.incoming.qsize() == len(ret)

  

          expected_msg_tags = []
@@ -714,18 +725,18 @@ 

  

          mock_gw.return_value = greenwave_result

  

-         poller.poll_greenwave(conf, db.session)

+         poller.poll_greenwave(conf, db_session)

  

          mock_gw.assert_called_once()

-         module = models.ModuleBuild.query.filter_by(state=models.BUILD_STATES["ready"]).all()

+         modules = models.ModuleBuild.by_state(db_session, "ready")

  

          if greenwave_result:

-             assert len(module) == 2

-             assert set([m.id for m in module]) == {1, 2}

+             assert len(modules) == 2

+             assert set([m.id for m in modules]) == {1, 2}

          else:

-             assert len(module) == 1

-             assert module[0].id == 1

-             module = models.ModuleBuild.query.filter_by(state=models.BUILD_STATES["done"]).all()

-             assert len(module) == 1

-             assert module[0].id == 2

-             assert re.match("Gating failed.*", module[0].state_reason)

+             assert len(modules) == 1

+             assert modules[0].id == 1

+             modules = models.ModuleBuild.by_state(db_session, "done")

+             assert len(modules) == 1

+             assert modules[0].id == 2

+             assert re.match("Gating failed.*", modules[0].state_reason)
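
The ad-hoc ModuleBuild.query.filter_by(state=...) lookups give way to a by_state helper. Its definition is not part of these hunks; an assumed shape, consistent with how it is called above:

# models.py (assumed shape; a classmethod on the existing ModuleBuild model)
class ModuleBuild(Base):
    @classmethod
    def by_state(cls, db_session, state_name):
        # Translate the symbolic state name and query via the given session.
        return db_session.query(cls).filter_by(
            state=BUILD_STATES[state_name]).all()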

@@ -25,7 +25,8 @@ 

  import module_build_service.messaging

  import module_build_service.scheduler.handlers.repos

  import module_build_service.models

- from tests import conf, db, scheduler_init_data

+ from module_build_service.models import ComponentBuild

+ from tests import conf, scheduler_init_data

  

  

  class TestRepoDone:
@@ -39,7 +40,8 @@ 

          from_repo_done_event.return_value = None

          msg = module_build_service.messaging.KojiRepoChange(

              "no matches for this...", "2016-some-nonexistent-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(

+             config=conf, db_session=db_session, msg=msg)

  

      @mock.patch(

          "module_build_service.builder.KojiModuleBuilder."
@@ -76,7 +78,8 @@ 

  

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(

+             config=conf, db_session=db_session, msg=msg)

          build_fn.assert_called_once_with(

              artifact_name="tangerine",

              source=(
@@ -137,7 +140,8 @@ 

  

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(

+             config=conf, db_session=db_session, msg=msg)

  

          finalizer.assert_called_once()

  
@@ -177,7 +181,8 @@ 

  

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(

+             config=conf, db_session=db_session, msg=msg)

          build_fn.assert_called_once_with(

              artifact_name="tangerine",

              source=(
@@ -196,14 +201,17 @@ 

          complete or go to the next build batch.

          """

          scheduler_init_data(db_session, 1)

+ 

+         component_build = db_session.query(ComponentBuild).filter_by(package="tangerine").one()

+         component_build.tagged = False

+         db_session.commit()

+ 

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         component_build = (

-             module_build_service.models.ComponentBuild.query.filter_by(package="tangerine").one())

-         component_build.tagged = False

-         db.session.add(component_build)

-         db.session.commit()

-         module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)

+ 

+         module_build_service.scheduler.handlers.repos.done(

+             config=conf, db_session=db_session, msg=msg)

+ 

          mock_log_info.assert_called_with(

              "Ignoring repo regen, because not all components are tagged."

          )
@@ -241,7 +249,8 @@ 

  

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(

+             config=conf, db_session=db_session, msg=msg)

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

  

          assert module_build.state == module_build_service.models.BUILD_STATES["failed"]
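
A note on the recurring Model.query to db_session.query(Model) rewrite: Model.query is a property bound to Flask-SQLAlchemy's global scoped session, which is exactly the implicit state this PR removes. With an explicitly passed session, the same lookup goes through that session:

# Old: implicitly uses the Flask-SQLAlchemy scoped session
component = ComponentBuild.query.filter_by(package="tangerine").one()

# New: reads and writes flow through one explicit unit of work
component = db_session.query(ComponentBuild).filter_by(package="tangerine").one()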

@@ -21,25 +21,24 @@ 

  # Written by Jan Kaluza <jkaluza@redhat.com>

  

  import mock

+ import pytest

  

  from mock import patch

  

  import module_build_service.messaging

  import module_build_service.scheduler.handlers.repos

+ import module_build_service.scheduler.handlers.tags

  import module_build_service.models

- from tests import reuse_component_init_data

- from tests import conf, db

+ from tests import conf

  

  import koji

- import pytest

  

  

+ @pytest.mark.usefixtures("reuse_component_init_data")

  class TestTagTagged:

-     def setup_method(self, test_method):

-         reuse_component_init_data()

  

      @mock.patch("module_build_service.models.ModuleBuild.from_tag_change_event")

-     def test_no_matching_module(self, from_tag_change_event):

+     def test_no_matching_module(self, from_tag_change_event, db_session):

          """ Test that when a tag msg hits us and we have no match,

          that we do nothing gracefully.

          """
@@ -47,9 +46,9 @@ 

          msg = module_build_service.messaging.KojiTagChange(

              "no matches for this...", "2016-some-nonexistent-build", "artifact", "artifact-1.2-1")

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

  

-     def test_no_matching_artifact(self):

+     def test_no_matching_artifact(self, db_session):

          """ Test that when a tag msg hits us and we have no match,

          that we do nothing gracefully.

          """
@@ -60,7 +59,7 @@ 

              "artifact-1.2-1",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

  

      @patch(

          "module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -68,7 +67,7 @@ 

      )

      @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

-     def test_newrepo(self, create_builder, koji_get_session, dbg):

+     def test_newrepo(self, create_builder, koji_get_session, dbg, db_session):

          """

          Test that newRepo is called at the expected times.

          """
@@ -86,7 +85,7 @@ 

          }

          create_builder.return_value = builder

  

-         module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)

  

          # Set previous components as COMPLETE and tagged.

          module_build.batch = 1
@@ -102,7 +101,8 @@ 

              elif c.package == "perl-List-Compare":

                  c.nvr = "perl-List-Compare-0.53-5.module+0+d027b723"

              c.state = koji.BUILD_STATES["COMPLETE"]

-         db.session.commit()

+ 

+         db_session.commit()

  

          # Tag the first component to the buildroot.

          msg = module_build_service.messaging.KojiTagChange(
@@ -112,7 +112,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg

+             config=conf, db_session=db_session, msg=msg

          )

          # Tag the first component to the final tag.

          msg = module_build_service.messaging.KojiTagChange(
@@ -122,7 +122,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg

+             config=conf, db_session=db_session, msg=msg

          )

  

          # newRepo should not be called, because there are still components
@@ -137,7 +137,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg

+             config=conf, db_session=db_session, msg=msg

          )

  

          # newRepo should not be called, because the component has not been
@@ -152,15 +152,14 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

  

          # newRepo should be called now - all components have been tagged.

          koji_session.newRepo.assert_called_once_with(

              "module-testmodule-master-20170219191323-c40c156c-build")

  

          # Refresh our module_build object.

-         db.session.expunge(module_build)

-         module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()

+         db_session.refresh(module_build)

  

          # newRepo task_id should be stored in the database, so we can check its

          # status later in the poller.
@@ -172,7 +171,9 @@ 

      )

      @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

-     def test_newrepo_still_building_components(self, create_builder, koji_get_session, dbg):

+     def test_newrepo_still_building_components(

+         self, create_builder, koji_get_session, dbg, db_session

+     ):

          """

          Test that newRepo is called at the expected times.

          """
@@ -190,13 +191,14 @@ 

          }

          create_builder.return_value = builder

  

-         module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 2

-         component = module_build_service.models.ComponentBuild.query.filter_by(

+         component = db_session.query(module_build_service.models.ComponentBuild).filter_by(

              package="perl-Tangerine", module_id=module_build.id).one()

          component.state = koji.BUILD_STATES["BUILDING"]

          component.nvr = "perl-Tangerine-0.23-1.module+0+d027b723"

-         db.session.commit()

+ 

+         db_session.commit()

  

          # Tag the perl-List-Compare component to the buildroot.

          msg = module_build_service.messaging.KojiTagChange(
@@ -206,7 +208,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

          # Tag the perl-List-Compare component to final tag.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -215,7 +217,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

  

          # newRepo should not be called, because perl-List-Compare has not been

          # built yet.
@@ -227,7 +229,7 @@ 

      )

      @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

-     def test_newrepo_failed_components(self, create_builder, koji_get_session, dbg):

+     def test_newrepo_failed_components(self, create_builder, koji_get_session, dbg, db_session):

          """

          Test that newRepo is called at the expected times.

          """
@@ -245,7 +247,7 @@ 

          }

          create_builder.return_value = builder

  

-         module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)

  

          # Set previous components as COMPLETE and tagged.

          module_build.batch = 1
@@ -255,15 +257,18 @@ 

              c.tagged_in_final = True

  

          module_build.batch = 2

-         component = module_build_service.models.ComponentBuild.query.filter_by(

+ 

+         component = db_session.query(module_build_service.models.ComponentBuild).filter_by(

              package="perl-Tangerine", module_id=module_build.id).one()

          component.state = koji.BUILD_STATES["FAILED"]

          component.nvr = "perl-Tangerine-0.23-1.module+0+d027b723"

-         component = module_build_service.models.ComponentBuild.query.filter_by(

+ 

+         component = db_session.query(module_build_service.models.ComponentBuild).filter_by(

              package="perl-List-Compare", module_id=module_build.id).one()

          component.state = koji.BUILD_STATES["COMPLETE"]

          component.nvr = "perl-List-Compare-0.53-5.module+0+d027b723"

-         db.session.commit()

+ 

+         db_session.commit()

  

          # Tag the perl-List-Compare component to the buildroot.

          msg = module_build_service.messaging.KojiTagChange(
@@ -273,7 +278,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg

+             config=conf, db_session=db_session, msg=msg

          )

          # Tag the perl-List-Compare component to final tag.

          msg = module_build_service.messaging.KojiTagChange(
@@ -283,7 +288,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

  

          # newRepo should be called now - all successfully built

          # components have been tagged.
@@ -291,8 +296,7 @@ 

              "module-testmodule-master-20170219191323-c40c156c-build")

  

          # Refresh our module_build object.

-         db.session.expunge(module_build)

-         module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()

+         db_session.refresh(module_build)

  

          # newRepo task_id should be stored in the database, so we can check its

          # status later in the poller.
@@ -304,7 +308,9 @@ 

      )

      @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

-     def test_newrepo_multiple_batches_tagged(self, create_builder, koji_get_session, dbg):

+     def test_newrepo_multiple_batches_tagged(

+         self, create_builder, koji_get_session, dbg, db_session

+     ):

          """

          Test that newRepo is called just once, and only when all components

          are tagged, even if we tag components from multiple batches at the
@@ -324,19 +330,21 @@ 

          }

          create_builder.return_value = builder

  

-         module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 2

-         mbm = module_build_service.models.ComponentBuild.query.filter_by(

+ 

+         mbm = db_session.query(module_build_service.models.ComponentBuild).filter_by(

              module_id=3, package="module-build-macros").one()

          mbm.tagged = False

-         db.session.add(mbm)

+ 

          for c in module_build.current_batch():

              if c.package == "perl-Tangerine":

                  c.nvr = "perl-Tangerine-0.23-1.module+0+d027b723"

              elif c.package == "perl-List-Compare":

                  c.nvr = "perl-List-Compare-0.53-5.module+0+d027b723"

              c.state = koji.BUILD_STATES["COMPLETE"]

-         db.session.commit()

+ 

+         db_session.commit()

  

          # Tag the first component to the buildroot.

          msg = module_build_service.messaging.KojiTagChange(
@@ -346,7 +354,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

          # Tag the first component to the final tag.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -355,7 +363,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

  

          # newRepo should not be called, because there are still components

          # to tag.
@@ -369,7 +377,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

          # Tag the second component to final tag.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -378,7 +386,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

  

          # newRepo should not be called, because there are still components

          # to tag.
@@ -392,7 +400,7 @@ 

              "module-build-macros-0.1-1.module+0+b0a1d1f7",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

          # Tag the component from first batch to the buildroot.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -401,15 +409,14 @@ 

              "module-build-macros-0.1-1.module+0+b0a1d1f7",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

  

          # newRepo should be called now - all components have been tagged.

          koji_session.newRepo.assert_called_once_with(

              "module-testmodule-master-20170219191323-c40c156c-build")

  

          # Refresh our module_build object.

-         db.session.expunge(module_build)

-         module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()

+         db_session.refresh(module_build)

  

          # newRepo task_id should be stored in the database, so we can check its

          # status later in the poller.
@@ -421,7 +428,7 @@ 

      )

      @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

-     def test_newrepo_build_time_only(self, create_builder, koji_get_session, dbg):

+     def test_newrepo_build_time_only(self, create_builder, koji_get_session, dbg, db_session):

          """

          Test that component.build_time_only is respected in the tag handler.

          """
@@ -439,7 +446,7 @@ 

          }

          create_builder.return_value = builder

  

-         module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)

  

          # Set previous components as COMPLETE and tagged.

          module_build.batch = 1
@@ -451,18 +458,20 @@ 

              c.tagged_in_final = True

  

          module_build.batch = 2

-         component = module_build_service.models.ComponentBuild.query.filter_by(

+         component = db_session.query(module_build_service.models.ComponentBuild).filter_by(

              package="perl-Tangerine", module_id=module_build.id).one()

          component.state = koji.BUILD_STATES["COMPLETE"]

          component.build_time_only = True

          component.tagged = False

          component.tagged_in_final = False

          component.nvr = "perl-Tangerine-0.23-1.module+0+d027b723"

-         component = module_build_service.models.ComponentBuild.query.filter_by(

+ 

+         component = db_session.query(module_build_service.models.ComponentBuild).filter_by(

              package="perl-List-Compare", module_id=module_build.id).one()

          component.state = koji.BUILD_STATES["COMPLETE"]

          component.nvr = "perl-List-Compare-0.53-5.module+0+d027b723"

-         db.session.commit()

+ 

+         db_session.commit()

  

          # Tag the perl-Tangerine component to the buildroot.

          msg = module_build_service.messaging.KojiTagChange(
@@ -472,7 +481,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

          assert not koji_session.newRepo.called

          # Tag the perl-List-Compare component to the buildroot.

          msg = module_build_service.messaging.KojiTagChange(
@@ -482,7 +491,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

          # Tag the perl-List-Compare component to final tag.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -491,7 +500,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

  

          # newRepo should be called now - all successfully built

          # components have been tagged.
@@ -499,8 +508,7 @@ 

              "module-testmodule-master-20170219191323-c40c156c-build")

  

          # Refresh our module_build object.

-         db.session.expunge(module_build)

-         module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()

+         db_session.refresh(module_build)

  

          # newRepo task_id should be stored in the database, so we can check its

          # status later in the poller.
@@ -522,7 +530,7 @@ 

      @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

      def test_newrepo_not_duplicated(

-         self, create_builder, koji_get_session, dbg, task_state, expect_new_repo

+         self, create_builder, koji_get_session, dbg, task_state, expect_new_repo, db_session

      ):

          """

          Test that newRepo is not called if a task is already in progress.
@@ -541,7 +549,7 @@ 

          }

          create_builder.return_value = builder

  

-         module_build = module_build_service.models.ModuleBuild.query.get(3)

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)

          assert module_build

  

          # Set previous components as COMPLETE and tagged.
@@ -562,7 +570,7 @@ 

          if task_state is not None:

              module_build.new_repo_task_id = 123456

  

-         db.session.commit()

+         db_session.commit()

  

          # Tag the first component to the buildroot.

          msg = module_build_service.messaging.KojiTagChange(
@@ -572,7 +580,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

          # Tag the first component to the final tag.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -581,7 +589,7 @@ 

              "perl-Tangerine-0.23-1.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

          # Tag the second component to the buildroot.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -590,7 +598,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

          # Tag the second component to the final tag.

          msg = module_build_service.messaging.KojiTagChange(

              "id",
@@ -599,7 +607,7 @@ 

              "perl-List-Compare-0.53-5.module+0+d027b723",

          )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, session=db.session, msg=msg)

+             config=conf, db_session=db_session, msg=msg)

  

          # All components are tagged, newRepo should be called if there are no active tasks.

          if expect_new_repo:
@@ -609,8 +617,7 @@ 

              assert not koji_session.newRepo.called

  

          # Refresh our module_build object.

-         db.session.expunge(module_build)

-         module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()

+         db_session.refresh(module_build)

  

          # newRepo task_id should be stored in database, so we can check its

          # status later in poller.

@@ -24,8 +24,6 @@ 

  import json

  from mock import patch, Mock

  import pytest

- from module_build_service import conf

- from module_build_service.models import make_session

  from module_build_service.utils.greenwave import greenwave

  from tests import clean_database, make_module

  
@@ -36,7 +34,7 @@ 

          clean_database()

  

      @patch("module_build_service.utils.greenwave.requests")

-     def test_greenwave_query_decision(self, mock_requests):

+     def test_greenwave_query_decision(self, mock_requests, db_session):

          resp_status = 200

          resp_content = {

              "applicable_policies": ["osci_compose_modules"],
@@ -61,9 +59,8 @@ 

          response.status_code = resp_status

          mock_requests.post.return_value = response

  

-         with make_session(conf) as db_session:

-             fake_build = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

-             got_response = greenwave.query_decision(fake_build, prod_version="xxxx-8")

+         fake_build = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

+         got_response = greenwave.query_decision(fake_build, prod_version="xxxx-8")

  

          assert got_response == resp_content

          assert json.loads(mock_requests.post.call_args_list[0][1]["data"]) == {
@@ -157,7 +154,7 @@ 

  

      @pytest.mark.parametrize("policies_satisfied", (True, False))

      @patch("module_build_service.utils.greenwave.requests")

-     def test_greenwave_check_gating(self, mock_requests, policies_satisfied):

+     def test_greenwave_check_gating(self, mock_requests, policies_satisfied, db_session):

          resp_status = 200

          policies_content = {

              "policies": [
@@ -179,8 +176,7 @@ 

          mock_requests.get.return_value = responses[0]

          mock_requests.post.side_effect = responses[1:]

  

-         with make_session(conf) as db_session:

-             fake_build = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

-             result = greenwave.check_gating(fake_build)

+         fake_build = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

+         result = greenwave.check_gating(fake_build)

  

          assert result == policies_satisfied
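These greenwave tests illustrate the fixture pattern used throughout this PR: instead of opening a session with "with make_session(conf) as db_session:" inside each test, the test takes a db_session argument that pytest resolves as a fixture defined in the suite's conftest (not shown in this diff). A minimal sketch of what such a fixture could look like, assuming the session factory this PR introduces is models.make_db_session:

    # conftest.py -- illustrative sketch, not the PR's actual fixture
    import pytest

    from module_build_service import conf, models

    @pytest.fixture
    def db_session():
        # Yield one session for the whole test body and make sure it
        # is closed afterwards, mirroring the old with-block.
        with models.make_db_session(conf) as session:
            yield session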

file modified
+35 -33
@@ -20,7 +20,6 @@ 

  from mock import patch, Mock

  

  from module_build_service import conf

- from module_build_service.models import make_session

  from module_build_service.utils import ursine

  from tests import make_module, clean_database

  
@@ -130,28 +129,28 @@ 

          clean_database()

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_return_empty_if_no_ursine_build_tag_is_found(self, ClientSession):

-         session = ClientSession.return_value

+     def test_return_empty_if_no_ursine_build_tag_is_found(self, ClientSession, db_session):

+         koji_session = ClientSession.return_value

  

          # No module koji_tag in ursine content yet. This will result in an

          # empty list of ursine modulemds being returned.

-         session.getFullInheritance.return_value = [{"name": "tag-1.0-build"}]

-         session.getExternalRepoList.return_value = [{

+         koji_session.getFullInheritance.return_value = [{"name": "tag-1.0-build"}]

+         koji_session.getExternalRepoList.return_value = [{

              "external_repo_name": "tag-1.0-external-repo",

              "url": "http://example.com/repos/tag-4-build/latest/$arch/",

          }]

  

-         modulemds = ursine.get_modulemds_from_ursine_content("tag")

+         modulemds = ursine.get_modulemds_from_ursine_content(db_session, "tag")

          assert [] == modulemds

  

      @patch.object(conf, "koji_tag_prefixes", new=["module"])

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_modulemds(self, ClientSession):

-         session = ClientSession.return_value

+     def test_get_modulemds(self, ClientSession, db_session):

+         koji_session = ClientSession.return_value

  

          # Ensure to get the build tag for further query of ursine content.

          # For this test, the build tag is tag-4-build

-         session.getExternalRepoList.return_value = [{

+         koji_session.getExternalRepoList.return_value = [{

              "external_repo_name": "tag-1.0-external-repo",

              "url": "http://example.com/repos/tag-4-build/latest/$arch/",

          }]
@@ -169,7 +168,7 @@ 

                  ]

              raise ValueError("{} is not handled by test.".format(tag))

  

-         session.getFullInheritance.side_effect = mock_getFullInheritance

+         koji_session.getFullInheritance.side_effect = mock_getFullInheritance

  

          # Defaults to DB resolver, so create fake module builds and store them

          # into database to ensure they can be queried.
@@ -181,25 +180,24 @@ 

          # From the behavior of the following code, the reason for the error is

          # the mixed use of db.session and make_session; the latter is called

          # from the function ``get_modulemds_from_ursine_content``.

-         with make_session(conf) as db_session:

-             mmd_name1s2020c = make_module(

-                 db_session,

-                 "name1:s:2020:c", xmd={"mbs": {"koji_tag": "module-name1-s-2020-c"}})

-             mmd_name2s2021c = make_module(

-                 db_session,

-                 "name2:s:2021:c", xmd={"mbs": {"koji_tag": "module-name2-s-2021-c"}})

+         mmd_name1s2020c = make_module(

+             db_session,

+             "name1:s:2020:c", xmd={"mbs": {"koji_tag": "module-name1-s-2020-c"}})

+         mmd_name2s2021c = make_module(

+             db_session,

+             "name2:s:2021:c", xmd={"mbs": {"koji_tag": "module-name2-s-2021-c"}})

  

-             koji_tag = "tag"  # It's ok to use arbitrary tag name.

-             with patch.object(conf, "koji_external_repo_url_prefix", new="http://example.com/"):

-                 modulemds = ursine.get_modulemds_from_ursine_content(koji_tag)

+         koji_tag = "tag"  # It's ok to use arbitrary tag name.

+         with patch.object(conf, "koji_external_repo_url_prefix", new="http://example.com/"):

+             modulemds = ursine.get_modulemds_from_ursine_content(db_session, koji_tag)

  

-             test_nsvcs = [item.get_nsvc() for item in modulemds]

-             test_nsvcs.sort()

+         test_nsvcs = [item.get_nsvc() for item in modulemds]

+         test_nsvcs.sort()

  

-             expected_nsvcs = [mmd_name1s2020c.mmd().get_nsvc(), mmd_name2s2021c.mmd().get_nsvc()]

-             expected_nsvcs.sort()

+         expected_nsvcs = [mmd_name1s2020c.mmd().get_nsvc(), mmd_name2s2021c.mmd().get_nsvc()]

+         expected_nsvcs.sort()

  

-         session.getExternalRepoList.assert_called_once_with(koji_tag)

+         koji_session.getExternalRepoList.assert_called_once_with(koji_tag)

          assert expected_nsvcs == test_nsvcs

  

  
@@ -216,7 +214,7 @@ 

          original_xmd = fake_mmd.get_xmd()

  

          with patch.object(ursine, "log") as log:

-             ursine.handle_stream_collision_modules(fake_mmd)

+             ursine.handle_stream_collision_modules(db_session, fake_mmd)

              assert 2 == log.info.call_count

              find_stream_collision_modules.assert_not_called()

  
@@ -241,7 +239,7 @@ 

          get_modulemds_from_ursine_content.return_value = []

  

          with patch.object(ursine, "log") as log:

-             ursine.handle_stream_collision_modules(fake_mmd)

+             ursine.handle_stream_collision_modules(db_session, fake_mmd)

              assert 2 == log.info.call_count

  

          # Ensure stream_collision_modules is set.
@@ -272,7 +270,7 @@ 

          }

          fake_mmd = make_module(db_session, "name1:s:2020:c", xmd=xmd, store_to_db=False)

  

-         def mock_get_ursine_modulemds(koji_tag):

+         def mock_get_ursine_modulemds(db_session, koji_tag):

              if koji_tag == "module-rhel-8.0-build":

                  return [

                      # This is the one
@@ -325,7 +323,7 @@ 

          koji_session = ClientSession.return_value

          koji_session.listTaggedRPMS.side_effect = mock_listTaggedRPMS

  

-         ursine.handle_stream_collision_modules(fake_mmd)

+         ursine.handle_stream_collision_modules(db_session, fake_mmd)

  

          xmd = fake_mmd.get_xmd()

          buildrequires = xmd["mbs"]["buildrequires"]
@@ -346,9 +344,11 @@ 

      """Test ursine.find_stream_collision_modules"""

  

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

-     def test_no_modulemds_found_from_ursine_content(self, get_modulemds_from_ursine_content):

+     def test_no_modulemds_found_from_ursine_content(

+         self, get_modulemds_from_ursine_content, db_session

+     ):

          get_modulemds_from_ursine_content.return_value = []

-         assert not ursine.find_stream_collision_modules({}, "koji_tag")

+         assert not ursine.find_stream_collision_modules(db_session, {}, "koji_tag")

  

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

      def test_no_collisions_found(self, get_modulemds_from_ursine_content, db_session):
@@ -358,7 +358,8 @@ 

              make_module(db_session, "modules:2:1:c2", store_to_db=False),

              make_module(db_session, "modulet:3:1:c3", store_to_db=False),

          ]

-         assert [] == ursine.find_stream_collision_modules(xmd_mbs_buildrequires, "koji_tag")

+         assert [] == ursine.find_stream_collision_modules(

+             db_session, xmd_mbs_buildrequires, "koji_tag")

  

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

      def test_collision_modules_are_found(self, get_modulemds_from_ursine_content, db_session):
@@ -370,5 +371,6 @@ 

          ]

          get_modulemds_from_ursine_content.return_value = fake_modules

  

-         modules = ursine.find_stream_collision_modules(xmd_mbs_buildrequires, "koji_tag")

+         modules = ursine.find_stream_collision_modules(

+             db_session, xmd_mbs_buildrequires, "koji_tag")

          assert [fake_modules[1].get_nsvc()] == modules
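The recurring shape of the ursine changes above: helpers such as get_modulemds_from_ursine_content, handle_stream_collision_modules, and find_stream_collision_modules gain db_session as their first parameter instead of opening a session of their own, so the caller's session is shared all the way down. Schematically (bodies elided; only the signature change is the point):

    # Before: each helper opened its own session internally.
    def get_modulemds_from_ursine_content(koji_tag):
        with make_session(conf) as db_session:
            ...  # query module builds through this private session

    # After: the caller owns the session and passes it down, so nested
    # helpers stop mixing db.session with their own make_session scope.
    def get_modulemds_from_ursine_content(db_session, koji_tag):
        ...  # query module builds through the caller's session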

file modified
+284 -264
@@ -32,10 +32,8 @@ 

  from module_build_service import models, conf

  from module_build_service.errors import ProgrammingError, ValidationError, UnprocessableEntity

  from module_build_service.utils.general import load_mmd

+ from module_build_service.utils.submit import format_mmd

  from tests import (

-     reuse_component_init_data,

-     db,

-     reuse_shared_userspace_init_data,

      clean_database,

      init_data,

      scheduler_init_data,
@@ -92,34 +90,33 @@ 

          return commit_hash + sha1_hash[len(commit_hash):]

  

  

+ @pytest.mark.usefixtures("reuse_component_init_data")

  class TestUtilsComponentReuse:

-     def setup_method(self, test_method):

-         reuse_component_init_data()

- 

-     def teardown_method(self, test_method):

-         clean_database()

+     #

+     # def teardown_method(self, test_method):

+     #     clean_database()
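@pytest.mark.usefixtures("reuse_component_init_data") applies the named fixture to every test in the class without injecting it as an argument, which is what lets the per-test setup_method (and its teardown) be dropped. A sketch of how such a fixture could pair the setup with its cleanup, assuming it reuses the suite's existing helpers (the populate step is a hypothetical stand-in for whatever the real fixture does):

    # conftest.py -- illustrative sketch
    import pytest

    from tests import clean_database

    @pytest.fixture
    def reuse_component_init_data(db_session):
        _populate_reuse_component_data(db_session)  # hypothetical helper
        yield
        # Runs after the test, replacing teardown_method().
        clean_database()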

  

      @pytest.mark.parametrize(

          "changed_component", ["perl-List-Compare", "perl-Tangerine", "tangerine", None]

      )

-     def test_get_reusable_component_different_component(self, changed_component):

-         second_module_build = models.ModuleBuild.query.filter_by(id=3).one()

+     def test_get_reusable_component_different_component(self, changed_component, db_session):

+         second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

          if changed_component:

              mmd = second_module_build.mmd()

              mmd.get_rpm_component("tangerine").set_ref("00ea1da4192a2030f9ae023de3b3143ed647bbab")

              second_module_build.modulemd = mmd_to_str(mmd)

-             second_module_changed_component = models.ComponentBuild.query.filter_by(

+             second_module_changed_component = db_session.query(models.ComponentBuild).filter_by(

                  package=changed_component, module_id=3).one()

              second_module_changed_component.ref = "00ea1da4192a2030f9ae023de3b3143ed647bbab"

-             db.session.add(second_module_changed_component)

-             db.session.commit()

+             db_session.add(second_module_changed_component)

+             db_session.commit()

  

          plc_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "perl-List-Compare")

+             db_session, second_module_build, "perl-List-Compare")

          pt_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "perl-Tangerine")

+             db_session, second_module_build, "perl-Tangerine")

          tangerine_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "tangerine")

+             db_session, second_module_build, "tangerine")

  

          if changed_component == "perl-List-Compare":

              # perl-Tangerine can be reused even though a component in its batch has changed
@@ -143,53 +140,61 @@ 

              assert pt_rv.package == "perl-Tangerine"

              assert tangerine_rv.package == "tangerine"

  

-     def test_get_reusable_component_different_rpm_macros(self):

-         second_module_build = models.ModuleBuild.query.filter_by(id=3).one()

+     def test_get_reusable_component_different_rpm_macros(self, db_session):

+         second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

          mmd = second_module_build.mmd()

          buildopts = Modulemd.Buildopts()

          buildopts.set_rpm_macros("%my_macro 1")

          mmd.set_buildopts(buildopts)

          second_module_build.modulemd = mmd_to_str(mmd)

-         db.session.commit()

+         db_session.commit()

  

          plc_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "perl-List-Compare")

+             db_session, second_module_build, "perl-List-Compare")

          assert plc_rv is None

  

          pt_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "perl-Tangerine")

+             db_session, second_module_build, "perl-Tangerine")

          assert pt_rv is None

  

      @pytest.mark.parametrize("set_current_arch", [True, False])

      @pytest.mark.parametrize("set_database_arch", [True, False])

-     def test_get_reusable_component_different_arches(self, set_database_arch, set_current_arch):

-         second_module_build = models.ModuleBuild.query.filter_by(id=3).one()

+     def test_get_reusable_component_different_arches(

+         self, set_database_arch, set_current_arch, db_session

+     ):

+         second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

+ 

          if set_current_arch:  # set architecture for current build

              mmd = second_module_build.mmd()

              component = mmd.get_rpm_component("tangerine")

              component.reset_arches()

              component.add_restricted_arch("i686")

              second_module_build.modulemd = mmd_to_str(mmd)

+             db_session.commit()

+ 

          if set_database_arch:  # set architecture for build in database

-             second_module_changed_component = models.ComponentBuild.query.filter_by(

+             second_module_changed_component = db_session.query(models.ComponentBuild).filter_by(

                  package="tangerine", module_id=2).one()

              mmd = second_module_changed_component.module_build.mmd()

              component = mmd.get_rpm_component("tangerine")

              component.reset_arches()

              component.add_restricted_arch("i686")

              second_module_changed_component.module_build.modulemd = mmd_to_str(mmd)

-             db.session.add(second_module_changed_component)

-             db.session.commit()

+             db_session.commit()

  

          tangerine = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "tangerine")

+             db_session, second_module_build, "tangerine")

          assert bool(tangerine is None) != bool(set_current_arch == set_database_arch)

  

      @pytest.mark.parametrize("rebuild_strategy", models.ModuleBuild.rebuild_strategies.keys())

-     def test_get_reusable_component_different_buildrequires_hash(self, rebuild_strategy):

-         first_module_build = models.ModuleBuild.query.filter_by(id=2).one()

+     def test_get_reusable_component_different_buildrequires_hash(

+         self, rebuild_strategy, db_session

+     ):

+         first_module_build = models.ModuleBuild.get_by_id(db_session, 2)

          first_module_build.rebuild_strategy = rebuild_strategy

-         second_module_build = models.ModuleBuild.query.filter_by(id=3).one()

+         db_session.commit()

+ 

+         second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

          mmd = second_module_build.mmd()

          xmd = mmd.get_xmd()

          xmd["mbs"]["buildrequires"]["platform"]["ref"] = "da39a3ee5e6b4b0d3255bfef95601890afd80709"
@@ -197,14 +202,14 @@ 

          second_module_build.modulemd = mmd_to_str(mmd)

          second_module_build.ref_build_context = "37c6c57bedf4305ef41249c1794760b5cb8fad17"

          second_module_build.rebuild_strategy = rebuild_strategy

-         db.session.commit()

+         db_session.commit()

  

          plc_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "perl-List-Compare")

+             db_session, second_module_build, "perl-List-Compare")

          pt_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "perl-Tangerine")

+             db_session, second_module_build, "perl-Tangerine")

          tangerine_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "tangerine")

+             db_session, second_module_build, "tangerine")

  

          if rebuild_strategy == "only-changed":

              assert plc_rv is not None
@@ -216,10 +221,14 @@ 

              assert tangerine_rv is None

  

      @pytest.mark.parametrize("rebuild_strategy", models.ModuleBuild.rebuild_strategies.keys())

-     def test_get_reusable_component_different_buildrequires_stream(self, rebuild_strategy):

-         first_module_build = models.ModuleBuild.query.filter_by(id=2).one()

+     def test_get_reusable_component_different_buildrequires_stream(

+         self, rebuild_strategy, db_session

+     ):

+         first_module_build = models.ModuleBuild.get_by_id(db_session, 2)

          first_module_build.rebuild_strategy = rebuild_strategy

-         second_module_build = models.ModuleBuild.query.filter_by(id=3).one()

+         db_session.commit()

+ 

+         second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

          mmd = second_module_build.mmd()

          xmd = mmd.get_xmd()

          xmd["mbs"]["buildrequires"]["platform"]["stream"] = "different"
@@ -227,21 +236,21 @@ 

          second_module_build.modulemd = mmd_to_str(mmd)

          second_module_build.build_context = "37c6c57bedf4305ef41249c1794760b5cb8fad17"

          second_module_build.rebuild_strategy = rebuild_strategy

-         db.session.commit()

+         db_session.commit()

  

          plc_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "perl-List-Compare")

+             db_session, second_module_build, "perl-List-Compare")

          pt_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "perl-Tangerine")

+             db_session, second_module_build, "perl-Tangerine")

          tangerine_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "tangerine")

+             db_session, second_module_build, "tangerine")

  

          assert plc_rv is None

          assert pt_rv is None

          assert tangerine_rv is None

  

-     def test_get_reusable_component_different_buildrequires(self):

-         second_module_build = models.ModuleBuild.query.filter_by(id=3).one()

+     def test_get_reusable_component_different_buildrequires(self, db_session):

+         second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

          mmd = second_module_build.mmd()

          mmd.get_dependencies()[0].add_buildtime_stream("some_module", "master")

          xmd = mmd.get_xmd()
@@ -255,22 +264,22 @@ 

          mmd.set_xmd(xmd)

          second_module_build.modulemd = mmd_to_str(mmd)

          second_module_build.ref_build_context = "37c6c57bedf4305ef41249c1794760b5cb8fad17"

-         db.session.commit()

+         db_session.commit()

  

          plc_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "perl-List-Compare")

+             db_session, second_module_build, "perl-List-Compare")

          assert plc_rv is None

  

          pt_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "perl-Tangerine")

+             db_session, second_module_build, "perl-Tangerine")

          assert pt_rv is None

  

          tangerine_rv = module_build_service.utils.get_reusable_component(

-             db.session, second_module_build, "tangerine")

+             db_session, second_module_build, "tangerine")

          assert tangerine_rv is None

  

      @patch("module_build_service.utils.submit.submit_module_build")

-     def test_submit_module_build_from_yaml_with_skiptests(self, mock_submit):

+     def test_submit_module_build_from_yaml_with_skiptests(self, mock_submit, db_session):

          """

          Tests local module build from a yaml file with the skiptests option

  
@@ -279,7 +288,7 @@ 

                  inspect if it was called with correct arguments

          """

          module_dir = tempfile.mkdtemp()

-         module = models.ModuleBuild.query.filter_by(id=3).one()

+         module = models.ModuleBuild.get_by_id(db_session, 3)

          mmd = module.mmd()

          modulemd_yaml = mmd_to_str(mmd)

          modulemd_file_path = path.join(module_dir, "testmodule.yaml")
@@ -293,10 +302,10 @@ 

          with open(modulemd_file_path, "rb") as fd:

              handle = FileStorage(fd)

              module_build_service.utils.submit_module_build_from_yaml(

-                 username, handle, {}, stream=stream, skiptests=True)

+                 db_session, username, handle, {}, stream=stream, skiptests=True)

              mock_submit_args = mock_submit.call_args[0]

-             username_arg = mock_submit_args[0]

-             mmd_arg = mock_submit_args[1]

+             username_arg = mock_submit_args[1]

+             mmd_arg = mock_submit_args[2]

              assert mmd_arg.get_stream_name() == stream

              assert "\n\n%__spec_check_pre exit 0\n" in mmd_arg.get_buildopts().get_rpm_macros()

              assert username_arg == username
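The index shift in mock_submit_args (from [0]/[1] to [1]/[2]) exists because submit_module_build now takes the session as its first positional argument, pushing username and the mmd one slot to the right. Schematically (parameter names here are illustrative):

    # Before: submit_module_build(username, mmd, params)
    # After:  submit_module_build(db_session, username, mmd, params)
    mock_submit_args = mock_submit.call_args[0]  # positional args tuple
    username_arg = mock_submit_args[1]           # was index 0
    mmd_arg = mock_submit_args[2]                # was index 1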
@@ -311,22 +320,22 @@ 

          clean_database()

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_build_arches(self, ClientSession):

+     def test_get_build_arches(self, ClientSession, db_session):

          session = ClientSession.return_value

          session.getTag.return_value = {"arches": "ppc64le"}

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

-         r = module_build_service.utils.get_build_arches(mmd, conf)

+         r = module_build_service.utils.get_build_arches(db_session, mmd, conf)

          assert r == ["ppc64le"]

  

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_build_arches_no_arch_set(self, ClientSession):

+     def test_get_build_arches_no_arch_set(self, ClientSession, db_session):

          """

          When no architecture is set in Koji tag, fallback to conf.arches.

          """

          session = ClientSession.return_value

          session.getTag.return_value = {"arches": ""}

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

-         r = module_build_service.utils.get_build_arches(mmd, conf)

+         r = module_build_service.utils.get_build_arches(db_session, mmd, conf)

          assert set(r) == set(conf.arches)

  

      @patch(
@@ -334,17 +343,17 @@ 

          new_callable=mock.PropertyMock,

          return_value=["testmodule"],

      )

-     def test_get_build_arches_koji_tag_arches(self, cfg):

+     def test_get_build_arches_koji_tag_arches(self, cfg, db_session):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

          xmd = mmd.get_xmd()

          xmd["mbs"]["koji_tag_arches"] = ["ppc64", "ppc64le"]

          mmd.set_xmd(xmd)

  

-         r = module_build_service.utils.get_build_arches(mmd, conf)

+         r = module_build_service.utils.get_build_arches(db_session, mmd, conf)

          assert r == ["ppc64", "ppc64le"]

  

      @patch.object(conf, "base_module_arches", new={"platform:xx": ["x86_64", "i686"]})

-     def test_get_build_arches_base_module_override(self):

+     def test_get_build_arches_base_module_override(self, db_session):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

          xmd = mmd.get_xmd()

          mbs_options = xmd["mbs"] if "mbs" in xmd.keys() else {}
@@ -352,11 +361,11 @@ 

          xmd["mbs"] = mbs_options

          mmd.set_xmd(xmd)

  

-         r = module_build_service.utils.get_build_arches(mmd, conf)

+         r = module_build_service.utils.get_build_arches(db_session, mmd, conf)

          assert r == ["x86_64", "i686"]

  

      @pytest.mark.parametrize("context", ["c1", None])

-     def test_import_mmd_contexts(self, context):

+     def test_import_mmd_contexts(self, context, db_session):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

          mmd.set_context(context)

  
@@ -364,7 +373,7 @@ 

          xmd["mbs"]["koji_tag"] = "foo"

          mmd.set_xmd(xmd)

  

-         build, msgs = module_build_service.utils.import_mmd(db.session, mmd)

+         build, msgs = module_build_service.utils.import_mmd(db_session, mmd)

  

          mmd_context = build.mmd().get_context()

          if context:
@@ -374,16 +383,16 @@ 

              assert mmd_context == models.DEFAULT_MODULE_CONTEXT

              assert build.context == models.DEFAULT_MODULE_CONTEXT

  

-     def test_import_mmd_multiple_dependencies(self):

+     def test_import_mmd_multiple_dependencies(self, db_session):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

          mmd.add_dependencies(mmd.get_dependencies()[0].copy())

  

          expected_error = "The imported module's dependencies list should contain just one element"

          with pytest.raises(UnprocessableEntity) as e:

-             module_build_service.utils.import_mmd(db.session, mmd)

+             module_build_service.utils.import_mmd(db_session, mmd)

              assert str(e.value) == expected_error

  

-     def test_import_mmd_no_xmd_buildrequires(self):

+     def test_import_mmd_no_xmd_buildrequires(self, db_session):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

          xmd = mmd.get_xmd()

          del xmd["mbs"]["buildrequires"]
@@ -394,10 +403,10 @@ 

              'xmd["mbs"]["buildrequires"] dictionary is missing entries'

          )

          with pytest.raises(UnprocessableEntity) as e:

-             module_build_service.utils.import_mmd(db.session, mmd)

+             module_build_service.utils.import_mmd(db_session, mmd)

              assert str(e.value) == expected_error

  

-     def test_import_mmd_minimal_xmd_from_local_repository(self):

+     def test_import_mmd_minimal_xmd_from_local_repository(self, db_session):

          mmd = load_mmd(read_staged_data("formatted_testmodule"))

          xmd = mmd.get_xmd()

          xmd["mbs"] = {}
@@ -406,7 +415,7 @@ 

          xmd["mbs"]["commit"] = "unknown"

          mmd.set_xmd(xmd)

  

-         build, msgs = module_build_service.utils.import_mmd(db.session, mmd, False)

+         build, msgs = module_build_service.utils.import_mmd(db_session, mmd, False)

          assert build.name == mmd.get_module_name()

  

      @pytest.mark.parametrize(
@@ -420,7 +429,7 @@ 

              ("f-28", "fedora-28", "The disttag_marking cannot contain a dash"),

          ),

      )

-     def test_import_mmd_base_module(self, stream, disttag_marking, error_msg):

+     def test_import_mmd_base_module(self, stream, disttag_marking, error_msg, db_session):

          clean_database(add_platform_module=False)

          mmd = load_mmd(read_staged_data("platform"))

          mmd = mmd.copy(mmd.get_module_name(), stream)
@@ -432,25 +441,25 @@ 

  

          if error_msg:

              with pytest.raises(UnprocessableEntity, match=error_msg):

-                 module_build_service.utils.import_mmd(db.session, mmd)

+                 module_build_service.utils.import_mmd(db_session, mmd)

          else:

-             module_build_service.utils.import_mmd(db.session, mmd)

+             module_build_service.utils.import_mmd(db_session, mmd)

  

      def test_get_rpm_release_mse(self, db_session):

          init_data(contexts=True)

  

          build_one = models.ModuleBuild.get_by_id(db_session, 2)

-         release_one = module_build_service.utils.get_rpm_release(build_one)

+         release_one = module_build_service.utils.get_rpm_release(db_session, build_one)

          assert release_one == "module+2+b8645bbb"

  

          build_two = models.ModuleBuild.get_by_id(db_session, 3)

-         release_two = module_build_service.utils.get_rpm_release(build_two)

+         release_two = module_build_service.utils.get_rpm_release(db_session, build_two)

          assert release_two == "module+2+17e35784"

  

      def test_get_rpm_release_platform_stream(self, db_session):

          scheduler_init_data(db_session, 1)

          build_one = models.ModuleBuild.get_by_id(db_session, 2)

-         release = module_build_service.utils.get_rpm_release(build_one)

+         release = module_build_service.utils.get_rpm_release(db_session, build_one)

          assert release == "module+f28+2+814cfa39"

  

      def test_get_rpm_release_platform_stream_override(self, db_session):
@@ -471,7 +480,7 @@ 

          db_session.commit()

  

          build_one = models.ModuleBuild.get_by_id(db_session, 2)

-         release = module_build_service.utils.get_rpm_release(build_one)

+         release = module_build_service.utils.get_rpm_release(db_session, build_one)

          assert release == "module+fedora28+2+814cfa39"

  

      @patch(
@@ -505,24 +514,24 @@ 

          db_session.add(build_one)

          db_session.commit()

  

-         release = module_build_service.utils.get_rpm_release(build_one)

+         release = module_build_service.utils.get_rpm_release(db_session, build_one)

          assert release == "module+product12+2+814cfa39"

  

      def test_get_rpm_release_mse_scratch(self, db_session):

          init_data(contexts=True, scratch=True)

  

          build_one = models.ModuleBuild.get_by_id(db_session, 2)

-         release_one = module_build_service.utils.get_rpm_release(build_one)

+         release_one = module_build_service.utils.get_rpm_release(db_session, build_one)

          assert release_one == "scrmod+2+b8645bbb"

  

          build_two = models.ModuleBuild.get_by_id(db_session, 3)

-         release_two = module_build_service.utils.get_rpm_release(build_two)

+         release_two = module_build_service.utils.get_rpm_release(db_session, build_two)

          assert release_two == "scrmod+2+17e35784"

  

      def test_get_rpm_release_platform_stream_scratch(self, db_session):

          scheduler_init_data(db_session, 1, scratch=True)

          build_one = models.ModuleBuild.get_by_id(db_session, 2)

-         release = module_build_service.utils.get_rpm_release(build_one)

+         release = module_build_service.utils.get_rpm_release(db_session, build_one)

          assert release == "scrmod+f28+2+814cfa39"

  

      @patch("module_build_service.utils.submit.get_build_arches")
@@ -596,6 +605,7 @@ 

          mmd_xmd = mmd.get_xmd()

          assert mmd_xmd == xmd

  

+     @pytest.mark.usefixtures("reuse_shared_userspace_init_data")

      def test_get_reusable_component_shared_userspace_ordering(self, db_session):

          """

          For modules with a lot of components per batch, there is a big chance that
@@ -603,9 +613,8 @@ 

          current `new_module`. In this case, the reuse code should still be able to

          reuse the components.

          """

-         reuse_shared_userspace_init_data()

          new_module = models.ModuleBuild.get_by_id(db_session, 3)

-         rv = module_build_service.utils.get_reusable_component(db.session, new_module, "llvm")

+         rv = module_build_service.utils.get_reusable_component(db_session, new_module, "llvm")

          assert rv.package == "llvm"

  

      def test_validate_koji_tag_wrong_tag_arg_during_programming(self):
@@ -714,85 +723,91 @@ 

              assert str(cm.value).endswith(" No value provided.") is True

  

      @patch("module_build_service.scm.SCM")

-     def test_record_component_builds_duplicate_components(self, mocked_scm):

-         with app.app_context():

-             clean_database()

-             mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"

-             mocked_scm.return_value.get_latest.side_effect = [

-                 "4ceea43add2366d8b8c5a622a2fb563b625b9abf",

-                 "fbed359411a1baa08d4a88e0d12d426fbf8f602c",

-             ]

+     def test_record_component_builds_duplicate_components(self, mocked_scm, db_session):

+         clean_database()

+ 

+         # Mock for format_mmd to get components' latest ref

+         mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"

+         mocked_scm.return_value.get_latest.side_effect = [

+             "4ceea43add2366d8b8c5a622a2fb563b625b9abf",

+             "fbed359411a1baa08d4a88e0d12d426fbf8f602c",

+         ]

+ 

+         mmd = load_mmd(read_staged_data("testmodule"))

+         mmd = mmd.copy("testmodule-variant", "master")

+         module_build = module_build_service.models.ModuleBuild()

+         module_build.name = "testmodule-variant"

+         module_build.stream = "master"

+         module_build.version = 20170109091357

+         module_build.state = models.BUILD_STATES["init"]

+         module_build.scmurl = \

+             "https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79"

+         module_build.batch = 1

+         module_build.owner = "Tom Brady"

+         module_build.time_submitted = datetime(2017, 2, 15, 16, 8, 18)

+         module_build.time_modified = datetime(2017, 2, 15, 16, 19, 35)

+         module_build.rebuild_strategy = "changed-and-after"

+         module_build.modulemd = mmd_to_str(mmd)

+         db_session.add(module_build)

+         db_session.commit()

+         # Rename the modulemd to include

+         mmd = mmd.copy("testmodule")

+         # Remove perl-Tangerine and tangerine from the modulemd to include so only one

+         # component conflicts

+         mmd.remove_rpm_component("perl-Tangerine")

+         mmd.remove_rpm_component("tangerine")

+ 

+         error_msg = (

+             'The included module "testmodule" in "testmodule-variant" have '

+             "the following conflicting components: perl-List-Compare"

+         )

+         format_mmd(mmd, module_build.scmurl)

+         with pytest.raises(UnprocessableEntity) as e:

+             module_build_service.utils.record_component_builds(

+                 db_session, mmd, module_build, main_mmd=module_build.mmd())

  

-             mmd = load_mmd(read_staged_data("testmodule"))

-             mmd = mmd.copy("testmodule-variant", "master")

-             module_build = module_build_service.models.ModuleBuild()

-             module_build.name = "testmodule-variant"

-             module_build.stream = "master"

-             module_build.version = 20170109091357

-             module_build.state = models.BUILD_STATES["init"]

-             module_build.scmurl = \

-                 "https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79"

-             module_build.batch = 1

-             module_build.owner = "Tom Brady"

-             module_build.time_submitted = datetime(2017, 2, 15, 16, 8, 18)

-             module_build.time_modified = datetime(2017, 2, 15, 16, 19, 35)

-             module_build.rebuild_strategy = "changed-and-after"

-             module_build.modulemd = mmd_to_str(mmd)

-             db.session.add(module_build)

-             db.session.commit()

-             # Rename the the modulemd to include

-             mmd = mmd.copy("testmodule")

-             # Remove perl-Tangerine and tangerine from the modulemd to include so only one

-             # component conflicts

-             mmd.remove_rpm_component("perl-Tangerine")

-             mmd.remove_rpm_component("tangerine")

- 

-             error_msg = (

-                 'The included module "testmodule" in "testmodule-variant" have '

-                 "the following conflicting components: perl-List-Compare"

-             )

-             with pytest.raises(UnprocessableEntity) as e:

-                 module_build_service.utils.record_component_builds(

-                     mmd, module_build, main_mmd=module_build.mmd())

- 

-             assert str(e.value) == error_msg

+         assert str(e.value) == error_msg

  

      @patch("module_build_service.scm.SCM")

-     def test_record_component_builds_set_weight(self, mocked_scm):

-         with app.app_context():

-             clean_database()

-             mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"

-             mocked_scm.return_value.get_latest.side_effect = [

-                 "4ceea43add2366d8b8c5a622a2fb563b625b9abf",

-                 "fbed359411a1baa08d4a88e0d12d426fbf8f602c",

-                 "dbed259411a1baa08d4a88e0d12d426fbf8f6037",

-             ]

+     def test_record_component_builds_set_weight(self, mocked_scm, db_session):

+         clean_database()

  

-             mmd = load_mmd(read_staged_data("testmodule"))

-             # Set the module name and stream

-             mmd = mmd.copy("testmodule", "master")

-             module_build = module_build_service.models.ModuleBuild()

-             module_build.name = "testmodule"

-             module_build.stream = "master"

-             module_build.version = 20170109091357

-             module_build.state = models.BUILD_STATES["init"]

-             module_build.scmurl = \

-                 "https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79"

-             module_build.batch = 1

-             module_build.owner = "Tom Brady"

-             module_build.time_submitted = datetime(2017, 2, 15, 16, 8, 18)

-             module_build.time_modified = datetime(2017, 2, 15, 16, 19, 35)

-             module_build.rebuild_strategy = "changed-and-after"

-             module_build.modulemd = mmd_to_str(mmd)

-             db.session.add(module_build)

-             db.session.commit()

- 

-             module_build_service.utils.record_component_builds(mmd, module_build)

- 

-             assert module_build.state == models.BUILD_STATES["init"]

-             db.session.refresh(module_build)

-             for c in module_build.component_builds:

-                 assert c.weight == 1.5

+         # Mock for format_mmd to get components' latest ref

+         mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"

+         mocked_scm.return_value.get_latest.side_effect = [

+             "4ceea43add2366d8b8c5a622a2fb563b625b9abf",

+             "fbed359411a1baa08d4a88e0d12d426fbf8f602c",

+             "dbed259411a1baa08d4a88e0d12d426fbf8f6037",

+         ]

+ 

+         mmd = load_mmd(read_staged_data("testmodule"))

+         # Set the module name and stream

+         mmd = mmd.copy("testmodule", "master")

+ 

+         module_build = module_build_service.models.ModuleBuild()

+         module_build.name = "testmodule"

+         module_build.stream = "master"

+         module_build.version = 20170109091357

+         module_build.state = models.BUILD_STATES["init"]

+         module_build.scmurl = \

+             "https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79"

+         module_build.batch = 1

+         module_build.owner = "Tom Brady"

+         module_build.time_submitted = datetime(2017, 2, 15, 16, 8, 18)

+         module_build.time_modified = datetime(2017, 2, 15, 16, 19, 35)

+         module_build.rebuild_strategy = "changed-and-after"

+         module_build.modulemd = mmd_to_str(mmd)

+ 

+         db_session.add(module_build)

+         db_session.commit()

+ 

+         format_mmd(mmd, module_build.scmurl)

+         module_build_service.utils.record_component_builds(db_session, mmd, module_build)

+ 

+         assert module_build.state == models.BUILD_STATES["init"]

+         db_session.refresh(module_build)

+         for c in module_build.component_builds:

+             assert c.weight == 1.5
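Both rewritten tests above now call format_mmd(mmd, module_build.scmurl) themselves before record_component_builds, matching the new import at the top of the file: resolving each component's latest ref (the hashes served one by one from the SCM mock's get_latest.side_effect list) happens up front, and record_component_builds then takes the prepared mmd plus an explicit session. The resulting sequence:

    # 1. Resolve component refs; each get_latest() call consumes the
    #    next hash from the mock's side_effect list.
    format_mmd(mmd, module_build.scmurl)

    # 2. Record the component builds against the caller's session.
    module_build_service.utils.record_component_builds(
        db_session, mmd, module_build)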

  

      @patch("module_build_service.scm.SCM")

      def test_format_mmd_arches(self, mocked_scm):
@@ -911,18 +926,17 @@ 

          assert v == 7000120180205135154

  

      @patch("module_build_service.utils.mse.generate_expanded_mmds")

-     def test_submit_build_new_mse_build(self, generate_expanded_mmds):

+     def test_submit_build_new_mse_build(self, generate_expanded_mmds, db_session):

          """

          Tests that finished build can be resubmitted in case the resubmitted

          build adds new MSE build (it means there are new expanded

          buildrequires).

          """

-         with models.make_session(conf) as db_session:

-             build = make_module(db_session, "foo:stream:0:c1", {}, {})

-             assert build.state == models.BUILD_STATES["ready"]

+         build = make_module(db_session, "foo:stream:0:c1", {}, {})

+         assert build.state == models.BUILD_STATES["ready"]

  

-             mmd1 = build.mmd()

-             mmd2 = build.mmd()

+         mmd1 = build.mmd()

+         mmd2 = build.mmd()

  

          mmd2.set_context("c2")

          generate_expanded_mmds.return_value = [mmd1, mmd2]
@@ -930,12 +944,12 @@ 

          mmd1_copy = mmd1.copy()

          mmd1_copy.set_xmd({})

  

-         builds = module_build_service.utils.submit_module_build("foo", mmd1_copy, {})

+         builds = module_build_service.utils.submit_module_build(db_session, "foo", mmd1_copy, {})

          ret = {b.mmd().get_context(): b.state for b in builds}

          assert ret == {"c1": models.BUILD_STATES["ready"], "c2": models.BUILD_STATES["init"]}

  

-         assert builds[0].siblings == [builds[1].id]

-         assert builds[1].siblings == [builds[0].id]

+         assert builds[0].siblings(db_session) == [builds[1].id]

+         assert builds[1].siblings(db_session) == [builds[0].id]
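The siblings assertions changing from attribute access to a call suggest ModuleBuild.siblings went from a property (which would have had to query through the global session) to a method taking the session explicitly. A hedged sketch of that shape (an assumption; the real implementation is in models.py):

    class ModuleBuild(Base):
        # ... columns elided ...

        def siblings(self, db_session):
            # IDs of the other builds of the same name/stream/version.
            query = db_session.query(ModuleBuild).filter(
                ModuleBuild.name == self.name,
                ModuleBuild.stream == self.stream,
                ModuleBuild.version == self.version,
                ModuleBuild.id != self.id,
            )
            return [build.id for build in query.all()]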

  

  

  class DummyModuleBuilder(GenericBuilder):
@@ -949,7 +963,8 @@ 

      TAGGED_COMPONENTS = []

  

      @module_build_service.utils.validate_koji_tag("tag_name")

-     def __init__(self, owner, module, config, tag_name, components):

+     def __init__(self, db_session, owner, module, config, tag_name, components):

+         self.db_session = db_session

          self.module_str = module

          self.tag_name = tag_name

          self.config = config
@@ -1009,21 +1024,21 @@ 

          pass

  

  

+ @pytest.mark.usefixtures("reuse_component_init_data")

  @patch(

      "module_build_service.builder.GenericBuilder.default_buildroot_groups",

      return_value={"build": [], "srpm-build": []},

  )

  class TestBatches:

      def setup_method(self, test_method):

-         reuse_component_init_data()

          GenericBuilder.register_backend_class(DummyModuleBuilder)

  

      def teardown_method(self, test_method):

-         clean_database()

+         # clean_database()

          DummyModuleBuilder.TAGGED_COMPONENTS = []

          GenericBuilder.register_backend_class(KojiModuleBuilder)

  

-     def test_start_next_batch_build_reuse(self, default_buildroot_groups):

+     def test_start_next_batch_build_reuse(self, default_buildroot_groups, db_session):

          """

          Tests that start_next_batch_build:

             1) Increments module.batch.
@@ -1033,12 +1048,12 @@ 

             5) Handling the further_work messages leads to proper tagging of

                reused components.

          """

-         module_build = models.ModuleBuild.query.filter_by(id=3).one()

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 1

  

          builder = mock.MagicMock()

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db.session, builder)

+             conf, module_build, db_session, builder)

  

          # Batch number should increase.

          assert module_build.batch == 2
@@ -1050,14 +1065,14 @@ 

          for msg in further_work:

              if type(msg) == module_build_service.messaging.KojiBuildChange:

                  assert msg.build_new_state == koji.BUILD_STATES["COMPLETE"]

-                 component_build = models.ComponentBuild.from_component_event(db.session, msg)

+                 component_build = models.ComponentBuild.from_component_event(db_session, msg)

                  assert component_build.state == koji.BUILD_STATES["BUILDING"]

  

          # When we handle these KojiBuildChange messages, MBS should tag all

          # the components just once.

          for msg in further_work:

              if type(msg) == module_build_service.messaging.KojiBuildChange:

-                 module_build_service.scheduler.handlers.components.complete(conf, db.session, msg)

+                 module_build_service.scheduler.handlers.components.complete(conf, db_session, msg)

  

          # Since we have reused all the components in the batch, there should

          # be fake KojiRepoChange message.
@@ -1067,7 +1082,9 @@ 

          assert len(DummyModuleBuilder.TAGGED_COMPONENTS) == 2

  

      @patch("module_build_service.utils.batches.start_build_component")

-     def test_start_next_batch_build_reuse_some(self, mock_sbc, default_buildroot_groups):

+     def test_start_next_batch_build_reuse_some(

+         self, mock_sbc, default_buildroot_groups, db_session

+     ):

          """

          Tests that start_next_batch_build:

             1) Increments module.batch.
@@ -1077,16 +1094,16 @@ 

             5) Handling the further_work messages leads to proper tagging of

                reused components.

          """

-         module_build = models.ModuleBuild.query.filter_by(id=3).one()

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 1

-         plc_component = models.ComponentBuild.query.filter_by(

+         plc_component = db_session.query(models.ComponentBuild).filter_by(

              module_id=3, package="perl-List-Compare").one()

          plc_component.ref = "5ceea46add2366d8b8c5a623a2fb563b625b9abd"

  

          builder = mock.MagicMock()

          builder.recover_orphaned_artifact.return_value = []

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db.session, builder)

+             conf, module_build, db_session, builder)

  

          # Batch number should increase.

          assert module_build.batch == 2
@@ -1098,7 +1115,7 @@ 

          # to BUILDING, so KojiBuildChange message handler handles the change

          # properly.

          assert further_work[0].build_new_state == koji.BUILD_STATES["COMPLETE"]

-         component_build = models.ComponentBuild.from_component_event(db.session, further_work[0])

+         component_build = models.ComponentBuild.from_component_event(db_session, further_work[0])

          assert component_build.state == koji.BUILD_STATES["BUILDING"]

          assert component_build.package == "perl-Tangerine"

          assert component_build.reused_component_id is not None
@@ -1114,20 +1131,20 @@ 

          return_value="all",

      )

      def test_start_next_batch_build_rebuild_strategy_all(

-         self, mock_rm, mock_sbc, default_buildroot_groups

+         self, mock_rm, mock_sbc, default_buildroot_groups, db_session

      ):

          """

          Tests that start_next_batch_build can't reuse any components in the batch because the

          rebuild method is set to "all".

          """

-         module_build = models.ModuleBuild.query.filter_by(id=3).one()

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.rebuild_strategy = "all"

          module_build.batch = 1

  

          builder = mock.MagicMock()

          builder.recover_orphaned_artifact.return_value = []

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db.session, builder)

+             conf, module_build, db_session, builder)

  

          # Batch number should increase.

          assert module_build.batch == 2
@@ -1136,7 +1153,7 @@ 

          # Make sure that both components in the batch were submitted

          assert len(mock_sbc.mock_calls) == 2

  

-     def test_start_build_component_failed_state(self, default_buildroot_groups):

+     def test_start_build_component_failed_state(self, default_buildroot_groups, db_session):

          """

          Tests whether an exception occurring while building sets the state to failed

          """
@@ -1144,7 +1161,7 @@ 

          builder.build.side_effect = Exception("Something have gone terribly wrong")

          component = mock.MagicMock()

  

-         module_build_service.utils.batches.start_build_component(builder, component)

+         module_build_service.utils.batches.start_build_component(db_session, builder, component)

  

          assert component.state == koji.BUILD_STATES["FAILED"]

  
@@ -1155,7 +1172,7 @@ 

          return_value="only-changed",

      )

      def test_start_next_batch_build_rebuild_strategy_only_changed(

-         self, mock_rm, mock_sbc, default_buildroot_groups

+         self, mock_rm, mock_sbc, default_buildroot_groups, db_session

      ):

          """

          Tests that start_next_batch_build reuses all unchanged components in the batch because the
@@ -1163,18 +1180,18 @@ 

          2, and even though the other component in batch 2 changed and was rebuilt, the component

          in batch 3 can be reused.

          """

-         module_build = models.ModuleBuild.query.filter_by(id=3).one()

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.rebuild_strategy = "only-changed"

          module_build.batch = 1

          # perl-List-Compare changed

-         plc_component = models.ComponentBuild.query.filter_by(

+         plc_component = db_session.query(models.ComponentBuild).filter_by(

              module_id=3, package="perl-List-Compare").one()

          plc_component.ref = "5ceea46add2366d8b8c5a623a2fb563b625b9abd"

  

          builder = mock.MagicMock()

          builder.recover_orphaned_artifact.return_value = []

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db.session, builder)

+             conf, module_build, db_session, builder)

  

          # Batch number should increase

          assert module_build.batch == 2
@@ -1186,7 +1203,7 @@ 

          # to BUILDING, so KojiBuildChange message handler handles the change

          # properly.

          assert further_work[0].build_new_state == koji.BUILD_STATES["COMPLETE"]

-         component_build = models.ComponentBuild.from_component_event(db.session, further_work[0])

+         component_build = models.ComponentBuild.from_component_event(db_session, further_work[0])

          assert component_build.state == koji.BUILD_STATES["BUILDING"]

          assert component_build.package == "perl-Tangerine"

          assert component_build.reused_component_id is not None
@@ -1198,35 +1215,37 @@ 

  

          # Complete the build

          plc_component.state = koji.BUILD_STATES["COMPLETE"]

-         pt_component = models.ComponentBuild.query.filter_by(

+         pt_component = db_session.query(models.ComponentBuild).filter_by(

              module_id=3, package="perl-Tangerine").one()

          pt_component.state = koji.BUILD_STATES["COMPLETE"]

  

          # Start the next build batch

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db.session, builder)

+             conf, module_build, db_session, builder)

          # Batch number should increase

          assert module_build.batch == 3

          # Verify that tangerine was reused even though perl-Tangerine was rebuilt in the previous

          # batch

          assert further_work[0].build_new_state == koji.BUILD_STATES["COMPLETE"]

-         component_build = models.ComponentBuild.from_component_event(db.session, further_work[0])

+         component_build = models.ComponentBuild.from_component_event(db_session, further_work[0])

          assert component_build.state == koji.BUILD_STATES["BUILDING"]

          assert component_build.package == "tangerine"

          assert component_build.reused_component_id is not None

          mock_sbc.assert_not_called()

  

      @patch("module_build_service.utils.batches.start_build_component")

-     def test_start_next_batch_build_smart_scheduling(self, mock_sbc, default_buildroot_groups):

+     def test_start_next_batch_build_smart_scheduling(

+         self, mock_sbc, default_buildroot_groups, db_session

+     ):

          """

          Tests that components with the longest build time will be scheduled first

          """

-         module_build = models.ModuleBuild.query.filter_by(id=3).one()

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 1

-         pt_component = models.ComponentBuild.query.filter_by(

+         pt_component = db_session.query(models.ComponentBuild).filter_by(

              module_id=3, package="perl-Tangerine").one()

          pt_component.ref = "6ceea46add2366d8b8c5a623b2fb563b625bfabe"

-         plc_component = models.ComponentBuild.query.filter_by(

+         plc_component = db_session.query(models.ComponentBuild).filter_by(

              module_id=3, package="perl-List-Compare").one()

          plc_component.ref = "5ceea46add2366d8b8c5a623a2fb563b625b9abd"

  
@@ -1238,7 +1257,7 @@ 

          builder = mock.MagicMock()

          builder.recover_orphaned_artifact.return_value = []

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db.session, builder)

+             conf, module_build, db_session, builder)

  

          # Batch number should increase.

          assert module_build.batch == 2
@@ -1252,26 +1271,29 @@ 

          assert plc_component.reused_component_id is None

  

          # Test the order of the scheduling

-         expected_calls = [mock.call(builder, plc_component), mock.call(builder, pt_component)]

+         expected_calls = [

+             mock.call(db_session, builder, plc_component),

+             mock.call(db_session, builder, pt_component)

+         ]

          assert mock_sbc.mock_calls == expected_calls

  

      @patch("module_build_service.utils.batches.start_build_component")

-     def test_start_next_batch_continue(self, mock_sbc, default_buildroot_groups):

+     def test_start_next_batch_continue(self, mock_sbc, default_buildroot_groups, db_session):

          """

          Tests that start_next_batch_build does not start new batch when

          there are unbuilt components in the current one.

          """

-         module_build = models.ModuleBuild.query.filter_by(id=3).one()

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 2

  

          # The component was reused when the batch first started

          building_component = module_build.current_batch()[0]

          building_component.state = koji.BUILD_STATES["BUILDING"]

-         db.session.commit()

+         db_session.commit()

  

          builder = mock.MagicMock()

          further_work = module_build_service.utils.start_next_batch_build(

-             conf, module_build, db.session, builder)

+             conf, module_build, db_session, builder)

  

          # Batch number should not increase.

          assert module_build.batch == 2
@@ -1280,12 +1302,12 @@ 

          # No further work should be returned

          assert len(further_work) == 0

  

-     def test_start_next_batch_build_repo_building(self, default_buildroot_groups):

+     def test_start_next_batch_build_repo_building(self, default_buildroot_groups, db_session):

          """

          Test that start_next_batch_build does not start new batch when

          builder.buildroot_ready() returns False.

          """

-         module_build = models.ModuleBuild.query.filter_by(id=3).one()

+         module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.batch = 1

  

          builder = mock.MagicMock()
@@ -1310,60 +1332,57 @@ 

      def teardown_method(self):

          clean_database()

  

-     def test_load_local_builds_name(self, conf_system, conf_resultsdir):

-         with app.app_context():

-             module_build_service.utils.load_local_builds("testmodule")

-             local_modules = models.ModuleBuild.local_modules(db.session)

+     def test_load_local_builds_name(self, conf_system, conf_resultsdir, db_session):

+         module_build_service.utils.load_local_builds(db_session, "testmodule")

+         local_modules = models.ModuleBuild.local_modules(db_session)

  

-             assert len(local_modules) == 1

-             assert local_modules[0].koji_tag.endswith(

-                 "/module-testmodule-master-20170816080816/results")

+         assert len(local_modules) == 1

+         assert local_modules[0].koji_tag.endswith(

+             "/module-testmodule-master-20170816080816/results")

  

-     def test_load_local_builds_name_stream(self, conf_system, conf_resultsdir):

-         with app.app_context():

-             module_build_service.utils.load_local_builds("testmodule:master")

-             local_modules = models.ModuleBuild.local_modules(db.session)

+     def test_load_local_builds_name_stream(self, conf_system, conf_resultsdir, db_session):

+         module_build_service.utils.load_local_builds(db_session, "testmodule:master")

+         local_modules = models.ModuleBuild.local_modules(db_session)

  

-             assert len(local_modules) == 1

-             assert local_modules[0].koji_tag.endswith(

-                 "/module-testmodule-master-20170816080816/results")

+         assert len(local_modules) == 1

+         assert local_modules[0].koji_tag.endswith(

+             "/module-testmodule-master-20170816080816/results")

  

-     def test_load_local_builds_name_stream_non_existing(self, conf_system, conf_resultsdir):

-         with app.app_context():

-             with pytest.raises(RuntimeError):

-                 module_build_service.utils.load_local_builds("testmodule:x")

-                 models.ModuleBuild.local_modules(db.session)

+     def test_load_local_builds_name_stream_non_existing(

+         self, conf_system, conf_resultsdir, db_session

+     ):

+         with pytest.raises(RuntimeError):

+             module_build_service.utils.load_local_builds(db_session, "testmodule:x")

+             models.ModuleBuild.local_modules(db_session)

  

-     def test_load_local_builds_name_stream_version(self, conf_system, conf_resultsdir):

-         with app.app_context():

-             module_build_service.utils.load_local_builds("testmodule:master:20170816080815")

-             local_modules = models.ModuleBuild.local_modules(db.session)

+     def test_load_local_builds_name_stream_version(self, conf_system, conf_resultsdir, db_session):

+         module_build_service.utils.load_local_builds(db_session, "testmodule:master:20170816080815")

+         local_modules = models.ModuleBuild.local_modules(db_session)

  

-             assert len(local_modules) == 1

-             assert local_modules[0].koji_tag.endswith(

-                 "/module-testmodule-master-20170816080815/results")

+         assert len(local_modules) == 1

+         assert local_modules[0].koji_tag.endswith(

+             "/module-testmodule-master-20170816080815/results")

  

-     def test_load_local_builds_name_stream_version_non_existing(self, conf_system, conf_resultsdir):

-         with app.app_context():

-             with pytest.raises(RuntimeError):

-                 module_build_service.utils.load_local_builds("testmodule:master:123")

-                 models.ModuleBuild.local_modules(db.session)

+     def test_load_local_builds_name_stream_version_non_existing(

+         self, conf_system, conf_resultsdir, db_session

+     ):

+         with pytest.raises(RuntimeError):

+             module_build_service.utils.load_local_builds(db_session, "testmodule:master:123")

+             models.ModuleBuild.local_modules(db_session)

  

-     def test_load_local_builds_platform(self, conf_system, conf_resultsdir):

-         with app.app_context():

-             module_build_service.utils.load_local_builds("platform")

-             local_modules = models.ModuleBuild.local_modules(db.session)

+     def test_load_local_builds_platform(self, conf_system, conf_resultsdir, db_session):

+         module_build_service.utils.load_local_builds(db_session, "platform")

+         local_modules = models.ModuleBuild.local_modules(db_session)

  

-             assert len(local_modules) == 1

-             assert local_modules[0].koji_tag.endswith("/module-platform-f28-3/results")

+         assert len(local_modules) == 1

+         assert local_modules[0].koji_tag.endswith("/module-platform-f28-3/results")

  

-     def test_load_local_builds_platform_f28(self, conf_system, conf_resultsdir):

-         with app.app_context():

-             module_build_service.utils.load_local_builds("platform:f28")

-             local_modules = models.ModuleBuild.local_modules(db.session)

+     def test_load_local_builds_platform_f28(self, conf_system, conf_resultsdir, db_session):

+         module_build_service.utils.load_local_builds(db_session, "platform:f28")

+         local_modules = models.ModuleBuild.local_modules(db_session)

  

-             assert len(local_modules) == 1

-             assert local_modules[0].koji_tag.endswith("/module-platform-f28-3/results")

+         assert len(local_modules) == 1

+         assert local_modules[0].koji_tag.endswith("/module-platform-f28-3/results")

  

  

  class TestOfflineLocalBuilds:
@@ -1373,10 +1392,10 @@ 

      def teardown_method(self):

          clean_database()

  

-     def test_import_fake_base_module(self):

-         module_build_service.utils.import_fake_base_module("platform:foo:1:000000")

+     def test_import_fake_base_module(self, db_session):

+         module_build_service.utils.import_fake_base_module(db_session, "platform:foo:1:000000")

          module_build = models.ModuleBuild.get_build_from_nsvc(

-             db.session, "platform", "foo", 1, "000000")

+             db_session, "platform", "foo", 1, "000000")

          assert module_build

  

          mmd = module_build.mmd()
@@ -1394,7 +1413,7 @@ 

          assert set(mmd.get_profile_names()) == set(["buildroot", "srpm-buildroot"])

  

      @patch("module_build_service.utils.general.open", create=True, new_callable=mock.mock_open)

-     def test_import_builds_from_local_dnf_repos(self, patched_open):

+     def test_import_builds_from_local_dnf_repos(self, patched_open, db_session):

          with patch("dnf.Base") as dnf_base:

              repo = mock.MagicMock()

              repo.repofile = "/etc/yum.repos.d/foo.repo"
@@ -1404,27 +1423,28 @@ 

              base.repos = {"reponame": repo}

              patched_open.return_value.readlines.return_value = ("FOO=bar", "PLATFORM_ID=platform:x")

  

-             module_build_service.utils.import_builds_from_local_dnf_repos()

+             module_build_service.utils.import_builds_from_local_dnf_repos(db_session)

  

              base.read_all_repos.assert_called_once()

              repo.load.assert_called_once()

              repo.get_metadata_content.assert_called_once_with("modules")

  

              module_build = models.ModuleBuild.get_build_from_nsvc(

-                 db.session, "testmodule", "master", 20180205135154, "9c690d0e")

+                 db_session, "testmodule", "master", 20180205135154, "9c690d0e")

              assert module_build

              assert module_build.koji_tag == "repofile:///etc/yum.repos.d/foo.repo"

  

              module_build = models.ModuleBuild.get_build_from_nsvc(

-                 db.session, "platform", "x", 1, "000000")

+                 db_session, "platform", "x", 1, "000000")

              assert module_build

  

-     def test_import_builds_from_local_dnf_repos_platform_id(self):

+     def test_import_builds_from_local_dnf_repos_platform_id(self, db_session):

          with patch("dnf.Base"):

-             module_build_service.utils.import_builds_from_local_dnf_repos(platform_id="platform:y")

+             module_build_service.utils.import_builds_from_local_dnf_repos(

+                 db_session, platform_id="platform:y")

  

              module_build = models.ModuleBuild.get_build_from_nsvc(

-                 db.session, "platform", "y", 1, "000000")

+                 db_session, "platform", "y", 1, "000000")

              assert module_build

  

  
@@ -1436,34 +1456,34 @@ 

      def teardown_method(self, test_method):

          clean_database()

  

-     def test_get_reusable_module_when_reused_module_not_set(self):

-         module = models.ModuleBuild.query.filter_by(

+     def test_get_reusable_module_when_reused_module_not_set(self, db_session):

+         module = db_session.query(models.ModuleBuild).filter_by(

              name="nginx").order_by(models.ModuleBuild.id.desc()).first()

          module.state = models.BUILD_STATES["build"]

-         db.session.commit()

+         db_session.commit()

  

          assert not module.reused_module

  

          reusable_module = module_build_service.utils.get_reusable_module(

-             db.session, module)

+             db_session, module)

  

          assert module.reused_module

          assert reusable_module.id == module.reused_module_id

  

-     def test_get_reusable_module_when_reused_module_already_set(self):

-         modules = models.ModuleBuild.query.filter_by(

+     def test_get_reusable_module_when_reused_module_already_set(self, db_session):

+         modules = db_session.query(models.ModuleBuild).filter_by(

              name="nginx").order_by(models.ModuleBuild.id.desc()).limit(2).all()

          build_module = modules[0]

          reused_module = modules[1]

          build_module.state = models.BUILD_STATES["build"]

          build_module.reused_module_id = reused_module.id

-         db.session.commit()

+         db_session.commit()

  

          assert build_module.reused_module

          assert reused_module == build_module.reused_module

  

          reusable_module = module_build_service.utils.get_reusable_module(

-             db.session, build_module)

+             db_session, build_module)

  

          assert build_module.reused_module

          assert reusable_module.id == build_module.reused_module_id

@@ -24,7 +24,7 @@ 

  import module_build_service.utils

  from module_build_service import Modulemd, models

  from module_build_service.errors import StreamAmbigous

- from tests import db, clean_database, make_module, init_data, read_staged_data

+ from tests import clean_database, make_module, init_data, read_staged_data

  

  

  class TestUtilsModuleStreamExpansion:
@@ -34,19 +34,17 @@ 

      def teardown_method(self, test_method):

          clean_database()

  

-     def _get_mmds_required_by_module_recursively(self, module_build):

+     def _get_mmds_required_by_module_recursively(self, module_build, db_session):

          """

          Convenience wrapper around get_mmds_required_by_module_recursively

          returning the list of nsvc strings of the modules returned by the wrapped

          method.

          """

          mmd = module_build.mmd()

-         module_build_service.utils.expand_mse_streams(db.session, mmd)

-         modules = module_build_service.utils.get_mmds_required_by_module_recursively(mmd)

-         nsvcs = [

-             m.get_nsvc()

-             for m in modules

-         ]

+         module_build_service.utils.expand_mse_streams(db_session, mmd)

+         modules = module_build_service.utils.get_mmds_required_by_module_recursively(

+             db_session, mmd)

+         nsvcs = [m.get_nsvc() for m in modules]

          return nsvcs

  

      def _generate_default_modules(self, db_session):
@@ -164,10 +162,10 @@ 

          if stream_ambigous:

              with pytest.raises(StreamAmbigous):

                  module_build_service.utils.generate_expanded_mmds(

-                     db.session, module_build.mmd(), raise_if_stream_ambigous=True)

+                     db_session, module_build.mmd(), raise_if_stream_ambigous=True)

          else:

              module_build_service.utils.generate_expanded_mmds(

-                 db.session, module_build.mmd(), raise_if_stream_ambigous=True)

+                 db_session, module_build.mmd(), raise_if_stream_ambigous=True)

  

          # Check that if stream is ambigous and we define the stream, it does not raise

          # an exception.
@@ -177,13 +175,13 @@ 

                  name, stream = ns.split(":")

                  default_streams[name] = stream

              module_build_service.utils.generate_expanded_mmds(

-                 db.session,

+                 db_session,

                  module_build.mmd(),

                  raise_if_stream_ambigous=True,

                  default_streams=default_streams,

              )

  

-         mmds = module_build_service.utils.generate_expanded_mmds(db.session, module_build.mmd())

+         mmds = module_build_service.utils.generate_expanded_mmds(db_session, module_build.mmd())

  

          buildrequires_per_mmd_xmd = set()

          buildrequires_per_mmd_buildrequires = set()
@@ -322,7 +320,7 @@ 

      def test_get_required_modules_simple(self, requires, build_requires, expected, db_session):

          module_build = make_module(db_session, "app:1:0:c1", requires, build_requires)

          self._generate_default_modules(db_session)

-         nsvcs = self._get_mmds_required_by_module_recursively(module_build)

+         nsvcs = self._get_mmds_required_by_module_recursively(module_build, db_session)

          assert set(nsvcs) == set(expected)

  

      def _generate_default_modules_recursion(self, db_session):
@@ -367,7 +365,7 @@ 

      def test_get_required_modules_recursion(self, requires, build_requires, expected, db_session):

          module_build = make_module(db_session, "app:1:0:c1", requires, build_requires)

          self._generate_default_modules_recursion(db_session)

-         nsvcs = self._get_mmds_required_by_module_recursively(module_build)

+         nsvcs = self._get_mmds_required_by_module_recursively(module_build, db_session)

          assert set(nsvcs) == set(expected)

  

      def _generate_default_modules_modules_multiple_stream_versions(self, db_session):
@@ -399,10 +397,10 @@ 

      ):

          module_build = make_module(db_session, "app:1:0:c1", requires, build_requires)

          self._generate_default_modules_modules_multiple_stream_versions(db_session)

-         nsvcs = self._get_mmds_required_by_module_recursively(module_build)

+         nsvcs = self._get_mmds_required_by_module_recursively(module_build, db_session)

          assert set(nsvcs) == set(expected)

  

-     def test__get_base_module_mmds(self):

+     def test__get_base_module_mmds(self, db_session):

          """Ensure the correct results are returned without duplicates."""

          init_data(data_size=1, multiple_stream_versions=True)

          mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2.yaml"))
@@ -415,7 +413,7 @@ 

          mmd.remove_dependencies(deps)

          mmd.add_dependencies(new_deps)

  

-         mmds = module_build_service.utils.mse._get_base_module_mmds(mmd)

+         mmds = module_build_service.utils.mse._get_base_module_mmds(db_session, mmd)

          expected = set(["platform:f29.0.0", "platform:f29.1.0", "platform:f29.2.0"])

          # Verify no duplicates were returned before doing set operations

          assert len(mmds["ready"]) == len(expected)
@@ -440,7 +438,7 @@ 

  

          make_module(db_session, "platform:lp29.1.1:12:c11", {}, {}, virtual_streams=virtual_streams)

  

-         mmds = module_build_service.utils.mse._get_base_module_mmds(mmd)

+         mmds = module_build_service.utils.mse._get_base_module_mmds(db_session, mmd)

          if virtual_streams == ["f29"]:

              expected = set(

                  ["platform:f29.0.0", "platform:f29.1.0", "platform:f29.2.0", "platform:lp29.1.1"])
@@ -458,15 +456,16 @@ 

          "module_build_service.config.Config.allow_only_compatible_base_modules",

          new_callable=PropertyMock, return_value=False

      )

-     def test__get_base_module_mmds_virtual_streams_only_major_versions(self, cfg):

+     def test__get_base_module_mmds_virtual_streams_only_major_versions(self, cfg, db_session):

          """Ensure the correct results are returned without duplicates."""

          init_data(data_size=1, multiple_stream_versions=["foo28", "foo29", "foo30"])

  

          # Mark platform:foo28 as garbage to test that it is still considered as compatible.

-         platform = models.ModuleBuild.query.filter_by(name="platform", stream="foo28").first()

+         platform = db_session.query(models.ModuleBuild).filter_by(

+             name="platform", stream="foo28").first()

          platform.state = "garbage"

-         db.session.add(platform)

-         db.session.commit()

+         db_session.add(platform)

+         db_session.commit()

  

          mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2"))

          deps = mmd.get_dependencies()[0]
@@ -477,7 +476,7 @@ 

          mmd.remove_dependencies(deps)

          mmd.add_dependencies(new_deps)

  

-         mmds = module_build_service.utils.mse._get_base_module_mmds(mmd)

+         mmds = module_build_service.utils.mse._get_base_module_mmds(db_session, mmd)

          expected = {}

          expected["ready"] = set(["platform:foo29", "platform:foo30"])

          expected["garbage"] = set(["platform:foo28"])

@@ -36,7 +36,7 @@ 

  import re

  import sqlalchemy

  

- from tests import app, init_data, clean_database, reuse_component_init_data, staged_data_filename

+ from tests import app, init_data, clean_database, staged_data_filename

  from tests import read_staged_data

  from tests.test_scm import base_dir as scm_base_dir

  from module_build_service.errors import UnprocessableEntity
@@ -65,6 +65,7 @@ 

          commit=None,

          checkout_raise=False,

          get_latest_raise=False,

+         get_latest_error=None,

          branch="master",

      ):

          """
@@ -96,8 +97,8 @@ 

          self.mocked_scm.return_value.name = self.name

          self.mocked_scm.return_value.commit = self.commit

          if get_latest_raise:

-             self.mocked_scm.return_value.get_latest.side_effect = UnprocessableEntity(

-                 "Failed to get_latest commit")

+             self.mocked_scm.return_value.get_latest.side_effect = \

+                 get_latest_error or UnprocessableEntity("Failed to get_latest commit")

          else:

              self.mocked_scm.return_value.get_latest = self.get_latest

          self.mocked_scm.return_value.repository_root = "https://src.stg.fedoraproject.org/modules/"
@@ -245,8 +246,8 @@ 

          assert data["version"] == "2"

          assert data["virtual_streams"] == []

  

+     @pytest.mark.usefixtures("reuse_component_init_data")

      def test_query_build_with_br_verbose_mode(self):

-         reuse_component_init_data()

          rv = self.client.get("/module-build-service/1/module-builds/2?verbose=true")

          data = json.loads(rv.data)

          assert data["base_module_buildrequires"] == [{
@@ -464,6 +465,7 @@ 

                  for key, part in zip(nsvc_keys, nsvc_parts):

                      assert item[key] == part

  

+     @pytest.mark.usefixtures("reuse_component_init_data")

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_query_builds_with_binary_rpm(self, ClientSession):

          """
@@ -471,7 +473,6 @@ 

          which contain the rpm.

          """

          # update database with builds which contain koji tags.

-         reuse_component_init_data()

          mock_rpm_md = {"build_id": 1065871}

          mock_tags = [

              {"name": "module-testmodule-master-20170219191323-c40c156c"},
@@ -827,8 +828,8 @@ 

          }

          assert error == expected

  

+     @pytest.mark.usefixtures("reuse_component_init_data")

      def test_query_base_module_br_filters(self):

-         reuse_component_init_data()

          mmd = load_mmd(read_staged_data("platform"))

          mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")

          import_mmd(db.session, mmd)

This patch separates the use of database sessions in the different MBS
components and does not mix them together.

In general, MBS components can be divided into the REST API (implemented with
Flask), the non-REST API code including the backend build workflow
(implemented as a fedmsg consumer on top of fedmsg-hub and running
independently), and the library code shared by both. As a result, two kinds of
database sessions are used in MBS: one is created and managed by
Flask-SQLAlchemy, and the other is created directly from the SQLAlchemy
Session API. The goal of this patch is to ensure that the right session object
is used in the right place.

All the changes follow these rules:

  • REST API related code uses the session object db.session created and
    managed by Flask-SQLAlchemy.
  • Non-REST API related code uses a session object created with the
    SQLAlchemy Session API; the function make_db_session does that.
  • Shared code avoids creating a new session object as much as possible;
    instead, it accepts a db_session argument.

The first two rules apply to tests as well. A sketch of both patterns follows.
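
For illustration, the first two rules look roughly like this in code (a
minimal sketch; the view function and the event handler are hypothetical,
while db.session, make_db_session and ModuleBuild.get_by_id are names used in
this patch):

    from module_build_service import conf, db, models

    # REST API code (Flask view): use the Flask-SQLAlchemy managed session.
    def get_module_build(build_id):
        return models.ModuleBuild.get_by_id(db.session, build_id)

    # Backend code (fedmsg consumer, poller): create a session explicitly.
    def on_build_change(build_id):
        with models.make_db_session(conf) as db_session:
            build = models.ModuleBuild.get_by_id(db_session, build_id)
            # ... work with the build inside the session's lifetime ...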

Major changes:

  • Switch tests back to running against a file-based SQLite database.
  • make_session is renamed to make_db_session, and SQLAlchemy connection
    pool options are applied for the PostgreSQL backend (a sketch follows
    this list).
  • Frontend Flask related code uses db.session.
  • Code shared by the REST API and the backend build workflow accepts a
    SQLAlchemy session object as an argument. For example, the resolver
    class is constructed with a database session, and some functions accept
    a database session argument.
  • Build workflow related code uses the session object returned from
    make_db_session, and db.session is not used there.
  • Only tests for views use db.session; other tests use the db_session
    fixture to access the database.
  • All arguments named session that are used for database access are
    renamed to db_session.
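
A minimal sketch of the make_db_session shape described above (the real
function lives in models.py and may differ; the pool values, the commit and
rollback handling, and the conf attribute name are assumptions):

    from contextlib import contextmanager

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    @contextmanager
    def make_db_session(conf):
        """Yield a session created directly via the SQLAlchemy Session API."""
        engine_kwargs = {}
        # Connection pool options only apply to the PostgreSQL backend;
        # the concrete values here are illustrative.
        if conf.sqlalchemy_database_uri.startswith("postgresql"):
            engine_kwargs.update(pool_size=10, max_overflow=20)
        engine = create_engine(conf.sqlalchemy_database_uri, **engine_kwargs)
        session = sessionmaker(bind=engine)()
        try:
            yield session
            session.commit()
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()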

Signed-off-by: Chenxiong Qi cqi@redhat.com

Build #243 failed (commit: a5b16a209dc55584a30b4f437228cd8ee078be84).
Rebase or make new commits to rebuild.

rebased onto 2a945659a74de72bbb3ede46efaaa75e7fd8ef50

4 years ago

@cqi could you please rebase this?

rebased onto 98b560228e40da7ce2035bab762207f049b597a4

4 years ago

Should this be db_session instead of db.session?

multiple threads => multi-threaded

I didn't realize this was in this method. After this PR, it'd be nice to see if any of the MBS code relies on this.

Why are you committing here? Is it just to close the transaction?

Either way, you'll need to use db_session and not db.session.

Did you add this to address the issue in this PR and forget, or is this for a separate PR? Good catch though.

I like that you moved this out of record_component_builds, but a separate commit explaining this change would have been nice. Could you please explain what motivated it?

drop all data from database => dropping all the data in the database

recreate all tables => recreating all the tables

Can you convert the second sentence to a TODO or FIXME comment? You could also file an issue to fix this and reference it here.

These comments are great, but they're probably better left in the commit message or PR description. I say this because they are only relevant when reviewing the change.

Does it make sense to apply this patch to the class to avoid needing to copy it on every test method? It seems like the return_value is the same throughout the class.
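
(For reference, mock.patch can decorate the whole test class; the patch
target below appears in this PR, while the class and method names are
illustrative.)

    import mock

    # patch() applied to a class decorates every method whose name starts
    # with "test" and passes the mock in as an extra argument, so the
    # decorator does not need to be repeated on each test method.
    @mock.patch("module_build_service.utils.general.open", create=True,
                new_callable=mock.mock_open)
    class TestLocalDnfRepos:
        def test_first(self, patched_open):
            patched_open.return_value.readlines.return_value = ("PLATFORM_ID=platform:x",)

        def test_second(self, patched_open):
            patched_open.return_value.readlines.return_value = ("PLATFORM_ID=platform:x",)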

I left a few comments, but this looks great. Nice job!

Once my comments are addressed, this is good to merge.

rebased onto a4c1814e21519de81ef61cf2cdac6a4da352b61d

4 years ago

@mprahl Thanks for taking the time to review this big PR.

Why are you committing here? Is it just to close the transaction?

This commit is actually not required for ModuleBuild.siblings; I removed it in the updated version of this PR. While debugging the test_submit_build test this time, I found that a query inside FakeKojiModuleBuilder works on db.session rather than the passed-in db_session, which causes that query not to be committed and blocks DML for the next test. The siblings query itself does not block anything. I suspect this unnecessary db.session.commit occasionally committed that query, which is what confused me.
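
A minimal sketch of that pitfall, assuming the file-based SQLite test
database (the session and variable names are illustrative):

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    from module_build_service import models

    engine = create_engine("sqlite:///mbstest.db")
    Session = sessionmaker(bind=engine)

    session_a = Session()  # stands in for db.session inside the fake builder
    session_b = Session()  # stands in for the passed-in db_session

    build = session_a.query(models.ModuleBuild).filter_by(id=3).one()
    build.state = models.BUILD_STATES["build"]
    session_a.flush()
    # Until session_a.commit() runs, the change sits in an open write
    # transaction: session_b cannot see it, and on SQLite the write lock
    # held by session_a can block DML issued later, e.g. by the next test.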

Did you add this to address this in this PR and forgot or is this for a separate PR? Good catch though.

I thought this could be done in a separate PR. :)

I like that you moved this out of record_component_builds, but a separate commit explaining this change would have been nice. Could you please explain what motivated it?

There were several reasons to move format_mmd out (see the sketch after this list):

  • Let one function do just one thing. record_component_builds is responsible for recording a module's components in the database; it should not also format the module metadata.
  • The logic is easier to understand. With format_mmd moved out, it is clear that the mmd must be formatted first and which of the following code requires a formatted mmd. The original code hid this information.
  • The original session argument usage conflicted: the session was optional for record_component_builds, but format_mmd required it. Every time I read the code, the behavior felt strange.
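
Roughly, the call sites change shape like this (signatures simplified; the
real functions take more parameters):

    # Before: record_component_builds() formatted the mmd internally, and
    # its session argument was optional even though format_mmd needed one.
    record_component_builds(mmd, module_build, session=None)

    # After: the caller formats first, so it is explicit that the following
    # code requires a formatted mmd and a real database session.
    format_mmd(mmd, module_build.scmurl)
    record_component_builds(db_session, mmd, module_build)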

Can you convert the second sentence to a TODO or FIXME comment? You could also file an issue to fix this and reference it here.

Sure. I added this comment mainly because of ModuleBuild.siblings. If we switch to autocommit=True mode, this should not be a problem, I think. I'm not sure the switch is worth doing, though, and it would also mean lots of changes across the code base.

These comments are great, but they're probably better left in the commit message or PR description. I say this because they are only relevant when reviewing the change.

+1. Updated the commit message.

Other comments are addressed. PTAL.

Could you please remove this commented-out code? It was missed in the last review.

Build #261 failed (commit: a4c1814e21519de81ef61cf2cdac6a4da352b61d).
Rebase or make new commits to rebuild.

rebased onto 5eae374ea10fa52f0bc0f91ef0ac197ced218315

4 years ago

Could you please remove this commented-out code? It was missed in the last review.

@mprahl Done. I had missed these lines as well. Also rebased.

@cqi this looks good to me, but there are merge conflicts again. Could you please rebase it? Feel free to merge it after the tests pass.

rebased onto 3878aff

4 years ago

Pull-Request has been merged by cqi

4 years ago
Changes Summary (54 files changed)

  +2 -1      conf/config.py
  +3 -2      module_build_service/builder/KojiContentGenerator.py
  +6 -4      module_build_service/builder/KojiModuleBuilder.py
  +3 -2      module_build_service/builder/MockModuleBuilder.py
  +14 -10    module_build_service/builder/base.py
  +6 -6      module_build_service/logger.py
  +61 -56    module_build_service/manage.py
  +198 -132  module_build_service/models.py
  +224 -231  module_build_service/resolver/DBResolver.py
  +10 -7     module_build_service/resolver/MBSResolver.py
  +0 -6      module_build_service/resolver/__init__.py
  +2 -2      module_build_service/resolver/base.py
  +2 -2      module_build_service/scheduler/__init__.py
  +15 -14    module_build_service/scheduler/consumer.py
  +2 -1      module_build_service/scheduler/default_modules.py
  +17 -15    module_build_service/scheduler/handlers/components.py
  +7 -6      module_build_service/scheduler/handlers/greenwave.py
  +71 -59    module_build_service/scheduler/handlers/modules.py
  +10 -8     module_build_service/scheduler/handlers/repos.py
  +6 -6      module_build_service/scheduler/handlers/tags.py
  +55 -50    module_build_service/scheduler/producer.py
  +32 -17    module_build_service/utils/batches.py
  +87 -85    module_build_service/utils/general.py
  +27 -31    module_build_service/utils/mse.py
  +17 -17    module_build_service/utils/reuse.py
  +52 -55    module_build_service/utils/submit.py
  +17 -11    module_build_service/utils/ursine.py
  +19 -7     module_build_service/views.py
  +116 -383  tests/__init__.py
  +325 -6    tests/conftest.py
  +186 -163  tests/test_build/test_build.py
  +21 -20    tests/test_builder/test_base.py
  +94 -57    tests/test_builder/test_koji.py
  +70 -72    tests/test_builder/test_mock.py
  +6 -3      tests/test_content_generator.py
  +11 -11    tests/test_logger.py
  +45 -45    tests/test_manage.py
  +0 -44     tests/test_models/__init__.py
  +122 -117  tests/test_models/test_models.py
  +12 -12    tests/test_monitor.py
  +57 -58    tests/test_resolver/test_db.py
  +7 -12     tests/test_resolver/test_local.py
  +48 -53    tests/test_resolver/test_mbs.py
  +24 -24    tests/test_scheduler/test_greenwave.py
  +35 -32    tests/test_scheduler/test_module_init.py
  +41 -37    tests/test_scheduler/test_module_wait.py
  +85 -74    tests/test_scheduler/test_poller.py
  +21 -12    tests/test_scheduler/test_repo_done.py
  +72 -65    tests/test_scheduler/test_tag_tagged.py
  +6 -10     tests/test_utils/test_greenwave.py
  +35 -33    tests/test_utils/test_ursine.py
  +284 -264  tests/test_utils/test_utils.py
  +22 -23    tests/test_utils/test_utils_mse.py
  +7 -6      tests/test_views/test_views.py