#1300 WIP: Refactor the use of SQLAlchemy database session
Closed 4 years ago by cqi. Opened 4 years ago by cqi.
cqi/fm-orchestrator separate-sqlalchemy  into  master

@@ -161,13 +161,14 @@ 

      region = dogpile.cache.make_region().configure("dogpile.cache.memory")

  

      @module_build_service.utils.validate_koji_tag("tag_name")

-     def __init__(self, owner, module, config, tag_name, components):

+     def __init__(self, db_session, owner, module, config, tag_name, components):

          """

          :param owner: a string representing who kicked off the builds

          :param module: module_build_service.models.ModuleBuild instance.

          :param config: module_build_service.config.Config instance

          :param tag_name: name of tag for given module

          """

+         self.db_session = db_session

          self.owner = owner

          self.module_str = module.name

          self.module = module
@@ -231,68 +232,70 @@ 

          return ready

  

      @staticmethod

-     def _get_filtered_rpms_on_self_dep(module_build, filtered_rpms_of_dep):

+     def _get_filtered_rpms_on_self_dep(db_session, module_build, filtered_rpms_of_dep):

          # filtered_rpms will contain the NVRs of non-reusable component's RPMs

          filtered_rpms = list(set(filtered_rpms_of_dep))

-         with models.make_session(conf) as db_session:

-             # Get a module build that can be reused, which will likely be the

-             # build dep that is used since it relies on itself

-             reusable_module = get_reusable_module(db_session, module_build)

-             if not reusable_module:

-                 return filtered_rpms

-             koji_session = KojiModuleBuilder.get_session(conf, login=False)

-             # Get all the RPMs and builds of the reusable module in Koji

-             rpms, builds = koji_session.listTaggedRPMS(reusable_module.koji_tag, latest=True)

-             # Convert the list to a dict where each key is the build_id

-             builds = {build["build_id"]: build for build in builds}

-             # Create a mapping of package (SRPM) to the RPMs in NVR format

-             package_to_rpms = {}

-             for rpm in rpms:

-                 package = builds[rpm["build_id"]]["name"]

-                 if package not in package_to_rpms:

-                     package_to_rpms[package] = []

-                 package_to_rpms[package].append(kobo.rpmlib.make_nvr(rpm))

- 

-             components_in_module = [c.package for c in module_build.component_builds]

-             reusable_components = get_reusable_components(

-                 db_session,

-                 module_build,

-                 components_in_module,

-                 previous_module_build=reusable_module,

-             )

-             # Loop through all the reusable components to find if any of their RPMs are

-             # being filtered

-             for reusable_component in reusable_components:

-                 # reusable_component will be None if the component can't be reused

-                 if not reusable_component:

-                     continue

-                 # We must get the component name from the NVR and not from

-                 # reusable_component.package because macros such as those used

-                 # by SCLs can change the name of the underlying build

-                 component_name = kobo.rpmlib.parse_nvr(reusable_component.nvr)["name"]

  

-                 if component_name not in package_to_rpms:

-                     continue

+         # Get a module build that can be reused, which will likely be the

+         # build dep that is used since it relies on itself

+         reusable_module = get_reusable_module(db_session, module_build)

+         if not reusable_module:

+             return filtered_rpms

+ 

+         koji_session = KojiModuleBuilder.get_session(conf, login=False)

+         # Get all the RPMs and builds of the reusable module in Koji

+         rpms, builds = koji_session.listTaggedRPMS(reusable_module.koji_tag, latest=True)

+         # Convert the list to a dict where each key is the build_id

+         builds = {build["build_id"]: build for build in builds}

+         # Create a mapping of package (SRPM) to the RPMs in NVR format

+         package_to_rpms = {}

+         for rpm in rpms:

+             package = builds[rpm["build_id"]]["name"]

+             if package not in package_to_rpms:

+                 package_to_rpms[package] = []

+             package_to_rpms[package].append(kobo.rpmlib.make_nvr(rpm))

+ 

+         components_in_module = [c.package for c in module_build.component_builds]

+         reusable_components = get_reusable_components(

+             db_session,

+             module_build,

+             components_in_module,

+             previous_module_build=reusable_module,

+         )

+         # Loop through all the reusable components to find if any of their RPMs are

+         # being filtered

+         for reusable_component in reusable_components:

+             # reusable_component will be None if the component can't be reused

+             if not reusable_component:

+                 continue

+             # We must get the component name from the NVR and not from

+             # reusable_component.package because macros such as those used

+             # by SCLs can change the name of the underlying build

+             component_name = kobo.rpmlib.parse_nvr(reusable_component.nvr)["name"]

  

-                 # Loop through the RPMs associated with the reusable component

-                 for nvr in package_to_rpms[component_name]:

-                     parsed_nvr = kobo.rpmlib.parse_nvr(nvr)

+             if component_name not in package_to_rpms:

+                 continue

+ 

+             # Loop through the RPMs associated with the reusable component

+             for nvr in package_to_rpms[component_name]:

+                 parsed_nvr = kobo.rpmlib.parse_nvr(nvr)

+                 # Don't compare with the epoch

+                 parsed_nvr["epoch"] = None

+                 # Loop through all the filtered RPMs to find a match with the reusable

+                 # component's RPMs.

+                 for nvr2 in list(filtered_rpms):

+                     parsed_nvr2 = kobo.rpmlib.parse_nvr(nvr2)

                      # Don't compare with the epoch

-                     parsed_nvr["epoch"] = None

-                     # Loop through all the filtered RPMs to find a match with the reusable

-                     # component's RPMs.

-                     for nvr2 in list(filtered_rpms):

-                         parsed_nvr2 = kobo.rpmlib.parse_nvr(nvr2)

-                         # Don't compare with the epoch

-                         parsed_nvr2["epoch"] = None

-                         # Only remove the filter if we are going to reuse a component with

-                         # the same exact NVR

-                         if parsed_nvr == parsed_nvr2:

-                             filtered_rpms.remove(nvr2)

-                             # Since filtered_rpms was cast to a set and then back

-                             # to a list above, we know there won't be duplicate RPMS,

-                             # so we can just break here.

-                             break

+                     parsed_nvr2["epoch"] = None

+                     # Only remove the filter if we are going to reuse a component with

+                     # the same exact NVR

+                     if parsed_nvr == parsed_nvr2:

+                         filtered_rpms.remove(nvr2)

+                         # Since filtered_rpms was cast to a set and then back

+                         # to a list above, we know there won't be duplicate RPMs,

+                         # so we can just break here.

+                         break

+ 

          return filtered_rpms

  

      @staticmethod
@@ -302,7 +305,7 @@ 

          return "Conflicts: {name} = {epoch}:{version}-{release}".format(**parsed_nvr)

  

      @staticmethod

-     def get_disttag_srpm(disttag, module_build):

+     def get_disttag_srpm(session, disttag, module_build):

  

          # Taken from Karsten's create-distmacro-pkg.sh

          # - however removed any provides to system-release/redhat-release
@@ -322,10 +325,11 @@ 

          for req_name, req_data in mmd.get_xmd()["mbs"]["buildrequires"].items():

              if req_data["filtered_rpms"]:

                  filter_conflicts.append("# Filtered rpms from %s module:" % req_name)

+ 

              # Check if the module depends on itself

              if req_name == module_build.name:

                  filtered_rpms = KojiModuleBuilder._get_filtered_rpms_on_self_dep(

-                     module_build, req_data["filtered_rpms"])

+                     session, module_build, req_data["filtered_rpms"])

              else:

                  filtered_rpms = req_data["filtered_rpms"]

              filter_conflicts.extend(map(KojiModuleBuilder.format_conflicts_line, filtered_rpms))
@@ -1255,23 +1259,22 @@ 

          return weights

  

      @classmethod

-     def get_built_rpms_in_module_build(cls, mmd):

+     def get_built_rpms_in_module_build(cls, session, mmd):

          """

          :param Modulemd mmd: Modulemd to get the built RPMs from.

          :return: list of NVRs

          """

-         with models.make_session(conf) as db_session:

-             build = models.ModuleBuild.get_build_from_nsvc(

-                 db_session,

-                 mmd.get_module_name(),

-                 mmd.get_stream_name(),

-                 mmd.get_version(),

-                 mmd.get_context()

-             )

-             koji_session = KojiModuleBuilder.get_session(conf, login=False)

-             rpms = koji_session.listTaggedRPMS(build.koji_tag, latest=True)[0]

-             nvrs = set(kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)

-             return list(nvrs)

+         build = models.ModuleBuild.get_build_from_nsvc(

+             session,

+             mmd.get_module_name(),

+             mmd.get_stream_name(),

+             mmd.get_version(),

+             mmd.get_context()

+         )

+         koji_session = KojiModuleBuilder.get_session(conf, login=False)

+         rpms = koji_session.listTaggedRPMS(build.koji_tag, latest=True)[0]

+         nvrs = set(kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)

+         return list(nvrs)

  

      def finalize(self, succeeded=True):

          # Only import to koji CG if the module is "build" and not scratch.

@@ -96,7 +96,8 @@ 

          raise IOError("None of {} yum config files found.".format(conf.yum_config_file))

  

      @module_build_service.utils.validate_koji_tag("tag_name")

-     def __init__(self, owner, module, config, tag_name, components):

+     def __init__(self, db_session, owner, module, config, tag_name, components):

+         self.db_session = db_session

          self.module_str = module.name

          self.module = module

          self.tag_name = tag_name
@@ -554,9 +555,9 @@ 

          return self.build_srpm(artifact_name, source, build_id, builder)

  

      @staticmethod

-     def get_disttag_srpm(disttag, module_build):

+     def get_disttag_srpm(session, disttag, module_build):

          # @FIXME

-         return KojiModuleBuilder.get_disttag_srpm(disttag, module_build)

+         return KojiModuleBuilder.get_disttag_srpm(session, disttag, module_build)

  

      def cancel_build(self, task_id):

          pass
@@ -575,27 +576,26 @@ 

              self._createrepo(include_module_yaml=True)

  

      @classmethod

-     def get_built_rpms_in_module_build(cls, mmd):

+     def get_built_rpms_in_module_build(cls, session, mmd):

          """

          :param Modulemd mmd: Modulemd to get the built RPMs from.

          :return: list of NVRs

          """

-         with models.make_session(conf) as db_session:

-             build = models.ModuleBuild.get_build_from_nsvc(

-                 db_session,

-                 mmd.get_module_name(),

-                 mmd.get_stream_name(),

-                 mmd.get_version(),

-                 mmd.get_context()

-             )

-             if build.koji_tag.startswith("repofile://"):

-                 # Modules from local repository have already the RPMs filled in mmd.

-                 return mmd.get_rpm_artifacts()

-             else:

-                 koji_session = KojiModuleBuilder.get_session(conf, login=False)

-                 rpms = koji_session.listTaggedRPMS(build.koji_tag, latest=True)[0]

-                 nvrs = set(kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)

-                 return list(nvrs)

+         build = models.ModuleBuild.get_build_from_nsvc(

+             session,

+             mmd.get_module_name(),

+             mmd.get_stream_name(),

+             mmd.get_version(),

+             mmd.get_context()

+         )

+         if build.koji_tag.startswith("repofile://"):

+             # Modules from local repository have already the RPMs filled in mmd.

+             return mmd.get_rpm_artifacts()

+         else:

+             koji_session = KojiModuleBuilder.get_session(conf, login=False)

+             rpms = koji_session.listTaggedRPMS(build.koji_tag, latest=True)[0]

+             nvrs = set(kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)

+             return list(nvrs)

  

  

  class BaseBuilder(object):

@@ -33,9 +33,8 @@ 

  from abc import ABCMeta, abstractmethod

  from requests.exceptions import ConnectionError

  

- from module_build_service import conf, log

- import module_build_service.resolver

- import module_build_service.scm

+ from module_build_service import conf, log, db, models

+ from module_build_service.resolver import GenericResolver

  import module_build_service.utils

  from module_build_service.utils import create_dogpile_key_generator_func

  
@@ -100,7 +99,7 @@ 

          GenericBuilder.backends[backend_class.backend] = backend_class

  

      @classmethod

-     def create(cls, owner, module, backend, config, **extra):

+     def create(cls, db_session, owner, module, backend, config, **extra):

          """

          :param owner: a string representing who kicked off the builds

          :param module: module_build_service.models.ModuleBuild instance.
@@ -111,7 +110,7 @@ 

          and are implementation-dependent.

          """

          # check if the backend is within allowed backends for the used resolver

-         resolver = module_build_service.resolver.system_resolver

+         resolver = GenericResolver.create(db_session, conf)

          if not resolver.is_builder_compatible(backend):

              raise ValueError(

                  "Builder backend '{}' is not compatible with resolver backend '{}'. Check your "
@@ -120,17 +119,17 @@ 

  

          if backend in GenericBuilder.backends:

              return GenericBuilder.backends[backend](

-                 owner=owner, module=module, config=config, **extra)

+                 db_session=db_session, owner=owner, module=module, config=config, **extra)

          else:

              raise ValueError("Builder backend='%s' not recognized" % backend)

  

      @classmethod

-     def create_from_module(cls, session, module, config, buildroot_connect=True):

+     def create_from_module(cls, db_session, module, config, buildroot_connect=True):

          """

          Creates new GenericBuilder instance based on the data from module

          and config and connects it to buildroot.

  

-         :param session: SQLAlchemy databa session.

+         :param db_session: SQLAlchemy database session.

          :param module: module_build_service.models.ModuleBuild instance.

          :param config: module_build_service.config.Config instance.

          :kwarg buildroot_connect: a boolean that determines if the builder should run
@@ -138,6 +137,7 @@ 

          """

          components = [c.package for c in module.component_builds]

          builder = GenericBuilder.create(

+             db_session,

              module.owner,

              module,

              config.system,
@@ -146,7 +146,7 @@ 

              components=components,

          )

          if buildroot_connect is True:

-             groups = GenericBuilder.default_buildroot_groups(session, module)

+             groups = GenericBuilder.default_buildroot_groups(db_session, module)

              builder.buildroot_connect(groups)

          return builder

  
@@ -308,7 +308,7 @@ 

      def default_buildroot_groups(cls, session, module):

          try:

              mmd = module.mmd()

-             resolver = module_build_service.resolver.system_resolver

+             resolver = GenericResolver.create(session, conf)

  

              # Resolve default buildroot groups using the MBS, but only for

              # non-local modules.

file modified
+76 -62
@@ -107,7 +107,8 @@ 

      """ Imports the module from mmd_file

      """

      mmd = load_mmd_file(mmd_file)

-     import_mmd(db.session, mmd)

+     with models.make_session(conf) as session:

+         import_mmd(session, mmd)

  

  

  @manager.option("--stream", action="store", dest="stream")
@@ -141,60 +142,62 @@ 

              raise ValueError(

                  "Please set RESOLVER to 'mbs' in your configuration for local builds.")

  

-     with app.app_context():

-         conf.set_item("system", "mock")

-         conf.set_item("base_module_repofiles", platform_repofiles)

- 

-         # Use our own local SQLite3 database.

-         confdir = os.path.abspath(os.getcwd())

-         dbdir = \

-             os.path.abspath(os.path.join(confdir, "..")) if confdir.endswith("conf") else confdir

-         dbpath = "/{0}".format(os.path.join(dbdir, ".mbs_local_build.db"))

-         dburi = "sqlite://" + dbpath

-         app.config["SQLALCHEMY_DATABASE_URI"] = dburi

-         conf.set_item("sqlalchemy_database_uri", dburi)

-         if os.path.exists(dbpath):

-             os.remove(dbpath)

- 

-         db.create_all()

-         if offline:

-             import_builds_from_local_dnf_repos(platform_id)

-         load_local_builds(local_build_nsvs)

- 

-         params = {}

-         params["local_build"] = True

-         params["default_streams"] = {}

-         for ns in default_streams:

-             n, s = ns.split(":")

-             params["default_streams"][n] = s

-         if srpms:

-             params["srpms"] = srpms

- 

-         username = getpass.getuser()

-         if not yaml_file or not yaml_file.endswith(".yaml"):

-             raise IOError("Provided modulemd file is not a yaml file.")

- 

-         yaml_file_path = os.path.abspath(yaml_file)

+     conf.set_item("system", "mock")

+     conf.set_item("base_module_repofiles", platform_repofiles)

+ 

+     # Use our own local SQLite3 database.

+     confdir = os.path.abspath(os.getcwd())

+     dbdir = \

+         os.path.abspath(os.path.join(confdir, "..")) if confdir.endswith("conf") else confdir

+     dbpath = "/{0}".format(os.path.join(dbdir, ".mbs_local_build.db"))

+     dburi = "sqlite://" + dbpath

+     app.config["SQLALCHEMY_DATABASE_URI"] = dburi

+     conf.set_item("sqlalchemy_database_uri", dburi)

+     if os.path.exists(dbpath):

+         os.remove(dbpath)

+ 

+     db.create_all()

+     if offline:

+         import_builds_from_local_dnf_repos(platform_id)

+     load_local_builds(local_build_nsvs)

+ 

+     params = {}

+     params["local_build"] = True

+     params["default_streams"] = {}

+     for ns in default_streams:

+         n, s = ns.split(":")

+         params["default_streams"][n] = s

+     if srpms:

+         params["srpms"] = srpms

+ 

+     username = getpass.getuser()

+     if not yaml_file or not yaml_file.endswith(".yaml"):

+         raise IOError("Provided modulemd file is not a yaml file.")

+ 

+     yaml_file_path = os.path.abspath(yaml_file)

+ 

+     with models.make_session(conf) as session:

          with open(yaml_file_path) as fd:

              filename = os.path.basename(yaml_file)

              handle = FileStorage(fd)

              handle.filename = filename

              try:

                  modules_list = submit_module_build_from_yaml(

-                     username, handle, params, stream=str(stream), skiptests=skiptests

+                     session, username, handle, params,

+                     stream=str(stream), skiptests=skiptests

                  )

              except StreamAmbigous as e:

                  logging.error(str(e))

                  logging.error("Use '-s module_name:module_stream' to choose the stream")

                  return

  

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         stop = module_build_service.scheduler.make_simple_stop_condition(session)

  

-         # Run the consumer until stop_condition returns True

-         module_build_service.scheduler.main([], stop)

+     # Run the consumer until stop_condition returns True

+     module_build_service.scheduler.main([], stop)

  

-         if any(module.state == models.BUILD_STATES["failed"] for module in modules_list):

-             raise RuntimeError("Module build failed")

+     if any(module.state == models.BUILD_STATES["failed"] for module in modules_list):

+         raise RuntimeError("Module build failed")

  

  

  @manager.option(
@@ -226,27 +229,38 @@ 

          filter_by_kwargs["context"] = parts[3]

  

      # Find module builds to retire

-     module_builds = db.session.query(models.ModuleBuild).filter_by(**filter_by_kwargs).all()

- 

-     if not module_builds:

-         logging.info("No module builds found.")

-         return

- 

-     logging.info("Found %d module builds:", len(module_builds))

-     for build in module_builds:

-         logging.info("\t%s", ":".join((build.name, build.stream, build.version, build.context)))

- 

-     # Prompt for confirmation

-     is_confirmed = confirm or prompt_bool("Retire {} module builds?".format(len(module_builds)))

-     if not is_confirmed:

-         logging.info("Module builds were NOT retired.")

-         return

- 

-     # Retire module builds

-     for build in module_builds:

-         build.transition(conf, models.BUILD_STATES["garbage"], "Module build retired")

-     db.session.commit()

-     logging.info("Module builds retired.")

+     with models.make_session(conf) as session:

+         module_builds = (

+             session.query(models.ModuleBuild)

+                    .filter_by(**filter_by_kwargs)

+                    .all()

+         )

+ 

+         if not module_builds:

+             logging.info("No module builds found.")

+             return

+ 

+         logging.info("Found %d module builds:", len(module_builds))

+         for build in module_builds:

+             logging.info("\t%s", ":".join((

+                 build.name, build.stream, build.version, build.context

+             )))

+ 

+         # Prompt for confirmation

+         is_confirmed = confirm or prompt_bool(

+             "Retire {} module builds?".format(len(module_builds))

+         )

+         if not is_confirmed:

+             logging.info("Module builds were NOT retired.")

+             return

+ 

+         # Retire module builds

+         for build in module_builds:

+             build.transition(

+                 conf, models.BUILD_STATES["garbage"], "Module build retired")

+ 

+         session.commit()

+         logging.info("Module builds retired.")

  

  

  @console_script_help

file modified
+18 -31
@@ -124,39 +124,26 @@ 

      """

  Yields new SQLAlchemy database session.

      """

- 

-     # Do not use scoped_session in case we are using in-memory database,

-     # because we want to use the same session across all threads to be able

-     # to use the same in-memory database in tests.

+     # TODO - we could use ZopeTransactionExtension() here some day for

+     # improved safety on the backend.

+     engine = sqlalchemy.engine_from_config({"sqlalchemy.url": conf.sqlalchemy_database_uri})

      if conf.sqlalchemy_database_uri == "sqlite://":

-         _setup_event_listeners(db.session)

-         yield db.session

-         db.session.commit()

-         return

- 

-     # Needs to be set to create app_context.

-     if not has_app_context() and ("SERVER_NAME" not in app.config or not app.config["SERVER_NAME"]):

-         app.config["SERVER_NAME"] = "localhost"

- 

-     # If there is no app_context, we have to create one before creating

-     # the session. If we would create app_context after the session (this

-     # happens in get_url_for() method), new concurrent session would be

-     # created and this would lead to "database is locked" error for SQLite.

-     with app.app_context() if not has_app_context() else _dummy_context_mgr():

-         # TODO - we could use ZopeTransactionExtension() here some day for

-         # improved safety on the backend.

-         engine = sqlalchemy.engine_from_config({"sqlalchemy.url": conf.sqlalchemy_database_uri})

+         # Do not use scoped_session in case we are using in-memory database,

+         # because we want to use the same session across all threads to be able

+         # to use the same in-memory database in tests.

+         session = sessionmaker(bind=engine)()

+     else:

          session = scoped_session(sessionmaker(bind=engine))()

-         _setup_event_listeners(session)

-         try:

-             yield session

-             session.commit()

-         except Exception:

-             # This is a no-op if no transaction is in progress.

-             session.rollback()

-             raise

-         finally:

-             session.close()

+     _setup_event_listeners(session)

+     try:

+         yield session

+         session.commit()

+     except Exception:

+         # This is a no-op if no transaction is in progress.

+         session.rollback()

+         raise

+     finally:

+         session.close()

  

  

  class MBSBase(db.Model):

@@ -35,25 +35,29 @@ 

  class DBResolver(GenericResolver):

      """

      Resolver using the MBS database

+ 

+     :param db_session: SQLAlchemy Session object for database access.

+     :param config: the config object.

+     :type config: :class:`Config`

      """

  

      backend = "db"

  

-     def __init__(self, config):

+     def __init__(self, db_session, config):

+         self.db_session = db_session

          self.config = config

  

      def _get_module(

          self, name, stream, version, context, state=models.BUILD_STATES["ready"], strict=False

      ):

-         with models.make_session(self.config) as session:

-             mb = models.ModuleBuild.get_build_from_nsvc(

-                 session, name, stream, version, context, state=state)

-             if mb:

-                 return mb.extended_json()

+         mb = models.ModuleBuild.get_build_from_nsvc(

+             self.db_session, name, stream, version, context, state=state)

+         if mb:

+             return mb.extended_json()

  

-             if strict:

-                 raise UnprocessableEntity(

-                     "Cannot find any module builds for %s:%s" % (name, stream))

+         if strict:

+             raise UnprocessableEntity(

+                 "Cannot find any module builds for %s:%s" % (name, stream))

  

      def get_module_count(self, **kwargs):

          """
@@ -62,8 +66,7 @@ 

          :return: the number of modules that match the provided filter

          :rtype: int

          """

-         with models.make_session(self.config) as session:

-             return models.ModuleBuild.get_module_count(session, **kwargs)

+         return models.ModuleBuild.get_module_count(self.db_session, **kwargs)

  

      def get_latest_with_virtual_stream(self, name, virtual_stream):

          """
@@ -74,17 +77,17 @@ 

          :return: the module's modulemd or None

          :rtype: Modulemd.ModuleStream or None

          """

-         with models.make_session(self.config) as session:

-             query = session.query(models.ModuleBuild).filter_by(name=name)

-             query = models.ModuleBuild._add_virtual_streams_filter(session, query, [virtual_stream])

-             # Cast the version as an integer so that we get proper ordering

-             module = query.order_by(

-                 models.ModuleBuild.stream_version.desc(),

-                 sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc(),

-             ).first()

- 

-             if module:

-                 return load_mmd(module.modulemd)

+         query = self.db_session.query(models.ModuleBuild).filter_by(name=name)

+         query = models.ModuleBuild._add_virtual_streams_filter(

+             self.db_session, query, [virtual_stream])

+         # Cast the version as an integer so that we get proper ordering

+         module = query.order_by(

+             models.ModuleBuild.stream_version.desc(),

+             sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc(),

+         ).first()

+ 

+         if module:

+             return load_mmd(module.modulemd)

  

      def get_module_modulemds(

          self,
@@ -98,6 +101,7 @@ 

      ):

          """

          Gets the module modulemds from the resolver.

+ 

          :param name: a string of the module's name

          :param stream: a string of the module's stream

          :param version: a string or int of the module's version. When None, latest version will
@@ -119,29 +123,29 @@ 

                  return

              return [load_mmd(mmd["modulemd"])]

  

-         with models.make_session(self.config) as session:

-             if not version and not context:

-                 if stream_version_lte and (

-                     len(str(models.ModuleBuild.get_stream_version(stream, right_pad=False))) >= 5

-                 ):

-                     stream_version = models.ModuleBuild.get_stream_version(stream)

-                     builds = models.ModuleBuild.get_last_builds_in_stream_version_lte(

-                         session, name, stream_version, virtual_streams)

-                 elif not stream_version_lte and virtual_streams:

-                     builds = models.ModuleBuild.get_last_builds_in_stream_version_lte(

-                         session, name, None, virtual_streams)

-                 else:

-                     builds = models.ModuleBuild.get_last_builds_in_stream(session, name, stream)

+         if not version and not context:

+             if stream_version_lte and (

+                 len(str(models.ModuleBuild.get_stream_version(stream, right_pad=False))) >= 5

+             ):

+                 stream_version = models.ModuleBuild.get_stream_version(stream)

+                 builds = models.ModuleBuild.get_last_builds_in_stream_version_lte(

+                     self.db_session, name, stream_version, virtual_streams)

+             elif not stream_version_lte and virtual_streams:

+                 builds = models.ModuleBuild.get_last_builds_in_stream_version_lte(

+                     self.db_session, name, None, virtual_streams)

              else:

-                 raise NotImplementedError(

-                     "This combination of name/stream/version/context is not implemented")

+                 builds = models.ModuleBuild.get_last_builds_in_stream(

+                     self.db_session, name, stream)

+         else:

+             raise NotImplementedError(

+                 "This combination of name/stream/version/context is not implemented")

  

-             if not builds and strict:

-                 raise UnprocessableEntity(

-                     "Cannot find any module builds for %s:%s" % (name, stream))

-             return [build.mmd() for build in builds]

+         if not builds and strict:

+             raise UnprocessableEntity(

+                 "Cannot find any module builds for %s:%s" % (name, stream))

+         return [build.mmd() for build in builds]

  

-     def get_buildrequired_modulemds(self, name, stream, base_module_nsvc):

+     def get_buildrequired_modulemds(self, name, stream, base_module_nsvc, strict=False):

          """

          Returns modulemd metadata of all module builds with `name` and `stream` buildrequiring

          base module defined by `base_module_nsvc` NSVC.
@@ -154,51 +158,50 @@ 

          :return: List of modulemd metadata.

          """

          log.debug("Looking for %s:%s buildrequiring %s", name, stream, base_module_nsvc)

-         with models.make_session(self.config) as session:

-             query = session.query(models.ModuleBuild)

-             query = query.filter_by(name=name, stream=stream, state=models.BUILD_STATES["ready"])

- 

-             module_br_alias = aliased(models.ModuleBuild, name="module_br")

-             # Shorten this table name for clarity in the query below

-             mb_to_br = models.module_builds_to_module_buildrequires

-             # The following joins get added:

-             # JOIN module_builds_to_module_buildrequires

-             #     ON module_builds_to_module_buildrequires.module_id = module_builds.id

-             # JOIN module_builds AS module_br

-             #     ON module_builds_to_module_buildrequires.module_buildrequire_id = module_br.id

-             query = query.join(mb_to_br, mb_to_br.c.module_id == models.ModuleBuild.id).join(

-                 module_br_alias, mb_to_br.c.module_buildrequire_id == module_br_alias.id)

- 

-             # Get only modules buildrequiring particular base_module_nsvc

-             n, s, v, c = base_module_nsvc.split(":")

-             query = query.filter(

-                 module_br_alias.name == n,

-                 module_br_alias.stream == s,

-                 module_br_alias.version == v,

-                 module_br_alias.context == c,

-             )

-             query = query.order_by(

-                 sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc())

-             all_builds = query.all()

- 

-             # The `all_builds` list contains builds sorted by "build.version". We need only

-             # the builds with latest version, but in all contexts.

-             builds = []

-             latest_version = None

-             for build in all_builds:

-                 if latest_version is None:

-                     latest_version = build.version

-                 if latest_version != build.version:

-                     break

-                 builds.append(build)

- 

-             mmds = [build.mmd() for build in builds]

-             nsvcs = [

-                 mmd.get_nsvc()

-                 for mmd in mmds

-             ]

-             log.debug("Found: %r", nsvcs)

-             return mmds

+         query = self.db_session.query(models.ModuleBuild)

+         query = query.filter_by(name=name, stream=stream, state=models.BUILD_STATES["ready"])

+ 

+         module_br_alias = aliased(models.ModuleBuild, name="module_br")

+         # Shorten this table name for clarity in the query below

+         mb_to_br = models.module_builds_to_module_buildrequires

+         # The following joins get added:

+         # JOIN module_builds_to_module_buildrequires

+         #     ON module_builds_to_module_buildrequires.module_id = module_builds.id

+         # JOIN module_builds AS module_br

+         #     ON module_builds_to_module_buildrequires.module_buildrequire_id = module_br.id

+         query = query.join(mb_to_br, mb_to_br.c.module_id == models.ModuleBuild.id).join(

+             module_br_alias, mb_to_br.c.module_buildrequire_id == module_br_alias.id)

+ 

+         # Get only modules buildrequiring particular base_module_nsvc

+         n, s, v, c = base_module_nsvc.split(":")

+         query = query.filter(

+             module_br_alias.name == n,

+             module_br_alias.stream == s,

+             module_br_alias.version == v,

+             module_br_alias.context == c,

+         )

+         query = query.order_by(

+             sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc())

+         all_builds = query.all()

+ 

+         # The `all_builds` list contains builds sorted by "build.version". We need only

+         # the builds with latest version, but in all contexts.

+         builds = []

+         latest_version = None

+         for build in all_builds:

+             if latest_version is None:

+                 latest_version = build.version

+             if latest_version != build.version:

+                 break

+             builds.append(build)

+ 

+         mmds = [build.mmd() for build in builds]

+         nsvcs = [

+             mmd.get_nsvc()

+             for mmd in mmds

+         ]

+         log.debug("Found: %r", nsvcs)

+         return mmds

  

      def resolve_profiles(self, mmd, keys):

          """
@@ -207,6 +210,7 @@ 

          the key in all buildrequires. If there are some modules loaded by

          utils.load_local_builds(...), these local modules will be considered when returning

          the profiles.

+ 

          :param mmd: Modulemd.ModuleStream instance representing the module

          :param keys: list of modulemd installation profiles to include in the result

          :return: a dictionary
@@ -214,44 +218,43 @@ 

          results = {}

          for key in keys:

              results[key] = set()

-         with models.make_session(self.config) as session:

-             for module_name, module_info in mmd.get_xmd()["mbs"]["buildrequires"].items():

-                 local_modules = models.ModuleBuild.local_modules(

-                     session, module_name, module_info["stream"])

-                 if local_modules:

-                     local_module = local_modules[0]

-                     log.info("Using local module {0!r} to resolve profiles.".format(local_module))

-                     dep_mmd = local_module.mmd()

-                     for key in keys:

-                         profile = dep_mmd.get_profile(key)

-                         if profile:

-                             results[key] |= set(profile.get_rpms())

-                     continue

- 

-                 build = models.ModuleBuild.get_build_from_nsvc(

-                     session,

-                     module_name,

-                     module_info["stream"],

-                     module_info["version"],

-                     module_info["context"],

-                     state=models.BUILD_STATES["ready"],

-                 )

-                 if not build:

-                     raise UnprocessableEntity(

-                         "The module {}:{}:{}:{} was not found".format(

-                             module_name,

-                             module_info["stream"],

-                             module_info["version"],

-                             module_info["context"],

-                         )

-                     )

-                 dep_mmd = build.mmd()

- 

-                 # Take note of what rpms are in this dep's profile

+         for module_name, module_info in mmd.get_xmd()["mbs"]["buildrequires"].items():

+             local_modules = models.ModuleBuild.local_modules(

+                 self.db_session, module_name, module_info["stream"])

+             if local_modules:

+                 local_module = local_modules[0]

+                 log.info("Using local module {0!r} to resolve profiles.".format(local_module))

+                 dep_mmd = local_module.mmd()

                  for key in keys:

                      profile = dep_mmd.get_profile(key)

                      if profile:

                          results[key] |= set(profile.get_rpms())

+                 continue

+ 

+             build = models.ModuleBuild.get_build_from_nsvc(

+                 self.db_session,

+                 module_name,

+                 module_info["stream"],

+                 module_info["version"],

+                 module_info["context"],

+                 state=models.BUILD_STATES["ready"],

+             )

+             if not build:

+                 raise UnprocessableEntity(

+                     "The module {}:{}:{}:{} was not found".format(

+                         module_name,

+                         module_info["stream"],

+                         module_info["version"],

+                         module_info["context"],

+                     )

+                 )

+             dep_mmd = build.mmd()

+ 

+             # Take note of what rpms are in this dep's profile

+             for key in keys:

+                 profile = dep_mmd.get_profile(key)

+                 if profile:

+                     results[key] |= set(profile.get_rpms())

  

          # Return the union of all rpms in all profiles of the given keys

          return results
@@ -288,53 +291,52 @@ 

              )

  

          module_tags = {}

-         with models.make_session(self.config) as session:

-             if mmd:

-                 queried_mmd = mmd

-                 nsvc = ":".join([

-                     mmd.get_module_name(),

-                     mmd.get_stream_name(),

-                     str(mmd.get_version()),

-                     mmd.get_context() or models.DEFAULT_MODULE_CONTEXT,

-                 ])

-             else:

-                 build = models.ModuleBuild.get_build_from_nsvc(

-                     session, name, stream, version, context)

-                 if not build:

-                     raise UnprocessableEntity(

-                         "The module {} was not found".format(

-                             ":".join([name, stream, version, context]))

-                     )

-                 queried_mmd = build.mmd()

-                 nsvc = ":".join([name, stream, version, context])

- 

-             xmd_mbs = queried_mmd.get_xmd().get("mbs", {})

-             if "buildrequires" not in xmd_mbs:

-                 raise RuntimeError(

-                     "The module {} did not contain its modulemd or did not have "

-                     "its xmd attribute filled out in MBS".format(nsvc)

+         if mmd:

+             queried_mmd = mmd

+             nsvc = ":".join([

+                 mmd.get_module_name(),

+                 mmd.get_stream_name(),

+                 str(mmd.get_version()),

+                 mmd.get_context() or models.DEFAULT_MODULE_CONTEXT,

+             ])

+         else:

+             build = models.ModuleBuild.get_build_from_nsvc(

+                 self.db_session, name, stream, version, context)

+             if not build:

+                 raise UnprocessableEntity(

+                     "The module {} was not found".format(

+                         ":".join([name, stream, version, context]))

                  )

+             queried_mmd = build.mmd()

+             nsvc = ":".join([name, stream, version, context])

+ 

+         xmd_mbs = queried_mmd.get_xmd().get("mbs", {})

+         if "buildrequires" not in xmd_mbs:

+             raise RuntimeError(

+                 "The module {} did not contain its modulemd or did not have "

+                 "its xmd attribute filled out in MBS".format(nsvc)

+             )

  

-             buildrequires = xmd_mbs["buildrequires"]

-             for br_name, details in buildrequires.items():

-                 build = models.ModuleBuild.get_build_from_nsvc(

-                     session,

-                     br_name,

-                     details["stream"],

-                     details["version"],

-                     details["context"],

-                     state=models.BUILD_STATES["ready"],

-                 )

-                 if not build:

-                     raise RuntimeError(

-                         "Buildrequired module %s %r does not exist in MBS db" % (br_name, details))

+         buildrequires = xmd_mbs["buildrequires"]

+         for br_name, details in buildrequires.items():

+             build = models.ModuleBuild.get_build_from_nsvc(

+                 self.db_session,

+                 br_name,

+                 details["stream"],

+                 details["version"],

+                 details["context"],

+                 state=models.BUILD_STATES["ready"],

+             )

+             if not build:

+                 raise RuntimeError(

+                     "Buildrequired module %s %r does not exist in MBS db" % (br_name, details))

  

-                 # If the buildrequire is a meta-data only module with no Koji tag set, then just

-                 # skip it

-                 if build.koji_tag is None:

-                     continue

-                 module_tags.setdefault(build.koji_tag, [])

-                 module_tags[build.koji_tag].append(build.mmd())

+             # If the buildrequire is a meta-data only module with no Koji tag set, then just

+             # skip it

+             if build.koji_tag is None:

+                 continue

+             module_tags.setdefault(build.koji_tag, [])

+             module_tags[build.koji_tag].append(build.mmd())

  

          return module_tags

  
@@ -346,74 +348,75 @@ 

          If there are some modules loaded by utils.load_local_builds(...), these

          local modules will be considered when resolving the requires. A RuntimeError

          is raised on DB lookup errors.

+ 

          :param requires: a list of N:S or N:S:V:C strings

          :return: a dictionary

          """

          new_requires = {}

-         with models.make_session(self.config) as session:

-             for nsvc in requires:

-                 nsvc_splitted = nsvc.split(":")

-                 if len(nsvc_splitted) == 2:

-                     module_name, module_stream = nsvc_splitted

-                     module_version = None

-                     module_context = None

-                 elif len(nsvc_splitted) == 4:

-                     module_name, module_stream, module_version, module_context = nsvc_splitted

-                 else:

-                     raise ValueError(

-                         "Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % nsvc)

- 

-                 local_modules = models.ModuleBuild.local_modules(

-                     session, module_name, module_stream)

-                 if local_modules:

-                     local_build = local_modules[0]

-                     new_requires[module_name] = {

-                         "ref": None,

-                         "stream": local_build.stream,

-                         "version": local_build.version,

-                         "context": local_build.context,

-                         "koji_tag": local_build.koji_tag,

-                     }

-                     continue

- 

-                 if module_version is None or module_context is None:

-                     build = models.ModuleBuild.get_last_build_in_stream(

-                         session, module_name, module_stream)

-                 else:

-                     build = models.ModuleBuild.get_build_from_nsvc(

-                         session, module_name, module_stream, module_version, module_context)

- 

-                 if not build:

-                     raise UnprocessableEntity("The module {} was not found".format(nsvc))

- 

-                 commit_hash = None

-                 mmd = build.mmd()

-                 mbs_xmd = mmd.get_xmd().get("mbs", {})

-                 if mbs_xmd.get("commit"):

-                     commit_hash = mbs_xmd["commit"]

-                 else:

-                     raise RuntimeError(

-                         'The module "{0}" didn\'t contain a commit hash in its xmd'.format(

-                             module_name)

-                     )

  

-                 if not mbs_xmd.get("mse"):

-                     raise RuntimeError(

-                         'The module "{}" is not built using Module Stream Expansion. '

-                         "Please rebuild this module first".format(nsvc)

-                     )

+         for nsvc in requires:

+             nsvc_splitted = nsvc.split(":")

+             if len(nsvc_splitted) == 2:

+                 module_name, module_stream = nsvc_splitted

+                 module_version = None

+                 module_context = None

+             elif len(nsvc_splitted) == 4:

+                 module_name, module_stream, module_version, module_context = nsvc_splitted

+             else:

+                 raise ValueError(

+                     "Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % nsvc)

  

+             local_modules = models.ModuleBuild.local_modules(

+                 self.db_session, module_name, module_stream)

+             if local_modules:

+                 local_build = local_modules[0]

                  new_requires[module_name] = {

-                     "ref": commit_hash,

-                     "stream": module_stream,

-                     "version": build.version,

-                     "context": build.context,

-                     "koji_tag": build.koji_tag,

+                     "ref": None,

+                     "stream": local_build.stream,

+                     "version": local_build.version,

+                     "context": local_build.context,

+                     "koji_tag": local_build.koji_tag,

                  }

+                 continue

+ 

+             if module_version is None or module_context is None:

+                 build = models.ModuleBuild.get_last_build_in_stream(

+                     self.db_session, module_name, module_stream)

+             else:

+                 build = models.ModuleBuild.get_build_from_nsvc(

+                     self.db_session,

+                     module_name, module_stream, module_version, module_context)

+ 

+             if not build:

+                 raise UnprocessableEntity("The module {} was not found".format(nsvc))

+ 

+             commit_hash = None

+             mmd = build.mmd()

+             mbs_xmd = mmd.get_xmd().get("mbs", {})

+             if mbs_xmd.get("commit"):

+                 commit_hash = mbs_xmd["commit"]

+             else:

+                 raise RuntimeError(

+                     'The module "{0}" didn\'t contain a commit hash in its xmd'.format(

+                         module_name)

+                 )

+ 

+             if not mbs_xmd.get("mse"):

+                 raise RuntimeError(

+                     'The module "{}" is not built using Module Stream Expansion. '

+                     "Please rebuild this module first".format(nsvc)

+                 )

+ 

+             new_requires[module_name] = {

+                 "ref": commit_hash,

+                 "stream": module_stream,

+                 "version": build.version,

+                 "context": build.context,

+                 "koji_tag": build.koji_tag,

+             }

  

          return new_requires

  

      def get_modulemd_by_koji_tag(self, tag):

-         with models.make_session(self.config) as session:

-             module = models.ModuleBuild.get_build_by_koji_tag(session, tag)

-             return module.mmd() if module else None

+         module = models.ModuleBuild.get_build_by_koji_tag(self.db_session, tag)

+         return module.mmd() if module else None

@@ -48,6 +48,7 @@ 

          incompatible streams. This platform stream is also expected to not follow

          the "X.Y.Z" formatting which is needed for stream versions.

  

+         :param bool strict: when True, raise an exception if no matching builds are found — TODO(review) confirm; this method takes no "session" parameter, it uses self.db_session.

          :param str name: Name of module to return.

          :param str stream: Stream of module to return.

          :param str base_module_nsvc: Ignored in LocalResolver.

@@ -39,10 +39,17 @@ 

  

  

  class MBSResolver(GenericResolver):

+     """Resolver querying a remote MBS instance.

+ 

+     :param db_session: SQLAlchemy Session object for database access.

+     :param config: the config object.

+     :type config: :class:`Config`

+     """

  

      backend = "mbs"

  

-     def __init__(self, config):

+     def __init__(self, db_session, config):

+         self.db_session = db_session

          self.mbs_prod_url = config.mbs_url

          self.session = requests.Session()

          adapter = requests.adapters.HTTPAdapter(max_retries=3)
@@ -179,6 +186,7 @@ 

      ):

          """

          Gets the module modulemds from the resolver.

+ 

          :param name: a string of the module's name

          :param stream: a string of the module's stream

          :param version: a string or int of the module's version. When None, latest version will
@@ -196,7 +204,7 @@ 

          """

          yaml = None

  

-         local_modules = models.ModuleBuild.local_modules(db.session, name, stream)

+         local_modules = models.ModuleBuild.local_modules(self.db_session, name, stream)

          if local_modules:

              return [m.mmd() for m in local_modules]

  
@@ -229,7 +237,7 @@ 

              mmds.append(load_mmd(yaml))

          return mmds

  

-     def get_buildrequired_modulemds(self, name, stream, base_module_nsvc):

+     def get_buildrequired_modulemds(self, name, stream, base_module_nsvc, strict=False):

          """

          Returns modulemd metadata of all module builds with `name` and `stream` buildrequiring

          base module defined by `base_module_nsvc` NSVC.
@@ -262,9 +270,10 @@ 

          results = {}

          for key in keys:

              results[key] = set()

+ 

          for module_name, module_info in mmd.get_xmd()["mbs"]["buildrequires"].items():

              local_modules = models.ModuleBuild.local_modules(

-                 db.session, module_name, module_info["stream"])

+                 self.db_session, module_name, module_info["stream"])

              if local_modules:

                  local_module = local_modules[0]

                  log.info("Using local module %r to resolve profiles.", local_module)
@@ -348,7 +357,8 @@ 

          buildrequires = queried_mmd.get_xmd()["mbs"]["buildrequires"]

          # Queue up the next tier of deps that we should look at..

          for name, details in buildrequires.items():

-             local_modules = models.ModuleBuild.local_modules(db.session, name, details["stream"])

+             local_modules = models.ModuleBuild.local_modules(

+                 self.db_session, name, details["stream"])

              if local_modules:

                  for m in local_modules:

                      # If the buildrequire is a meta-data only module with no Koji tag set, then just
@@ -382,6 +392,7 @@ 

          If there are some modules loaded by utils.load_local_builds(...), these

          local modules will be considered when resolving the requires. A RuntimeError

          is raised on MBS lookup errors.

+ 

          :param requires: a list of N:S or N:S:V:C strings

          :return: a dictionary

          """
@@ -399,7 +410,8 @@ 

                      "Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % nsvc)

              # Try to find out module dependency in the local module builds

              # added by utils.load_local_builds(...).

-             local_modules = models.ModuleBuild.local_modules(db.session, module_name, module_stream)

+             local_modules = models.ModuleBuild.local_modules(

+                 self.db_session, module_name, module_stream)

              if local_modules:

                  local_build = local_modules[0]

                  new_requires[module_name] = {
@@ -461,7 +473,7 @@ 

              # If the module is a base module, then import it in the database so that entries in

              # the module_builds_to_module_buildrequires table can be created later on

              if module_name in conf.base_module_names:

-                 import_mmd(db.session, mmd)

+                 import_mmd(self.db_session, mmd)

  

          return new_requires

  

@@ -33,8 +33,3 @@ 

  

  if not GenericResolver.backends:

      raise ValueError("No resolver plugins are installed or available.")

- 

- # Config has the option of which resolver should be used for current MBS run.

- # Hence, create a singleton system wide resolver for use. However, resolver

- # could be created with other required arguments in concrete cases.

- system_resolver = GenericResolver.create(conf)

@@ -52,7 +52,7 @@ 

          GenericResolver.backends[backend_class.backend] = backend_class

  

      @classmethod

-     def create(cls, config, backend=None, **extra):

+     def create(cls, db_session, config, backend=None, **extra):

          """Factory method to create a resolver object

  

          :param config: MBS config object.
@@ -71,7 +71,7 @@ 

              backend = conf.resolver

  

          if backend in GenericResolver.backends:

-             return GenericResolver.backends[backend](config, **extra)

+             return GenericResolver.backends[backend](db_session, config, **extra)

          else:

              raise ValueError("Resolver backend='%s' not recognized" % backend)

  

@@ -51,6 +51,8 @@ 

  

      The stop_condition returns true when the latest module build enters the any

      of the finished states.

+ 

+     :param session: a SQLAlchemy session.

      """

  

      def stop_condition(message):

@@ -28,6 +28,7 @@ 

  import module_build_service.resolver

  import module_build_service.utils

  import module_build_service.messaging

+ from module_build_service.resolver import GenericResolver

  from module_build_service.utils import (

      attempt_to_reuse_all_components,

      record_component_builds,
@@ -161,8 +162,8 @@ 

          record_component_builds(mmd, build, session=session)

          # The ursine.handle_stream_collision_modules is Koji specific.

          if conf.system in ["koji", "test"]:

-             handle_stream_collision_modules(mmd)

-         mmd = record_filtered_rpms(mmd)

+             handle_stream_collision_modules(session, mmd)

+         mmd = record_filtered_rpms(session, mmd)

          build.modulemd = mmd_to_str(mmd)

          build.transition(conf, models.BUILD_STATES["wait"])

      # Catch custom exceptions that we can expose to the user
@@ -218,16 +219,17 @@ 

  @module_build_service.utils.retry(

      interval=10, timeout=120, wait_on=(ValueError, RuntimeError, ConnectionError)

  )

- def get_module_build_dependencies(build):

+ def get_module_build_dependencies(session, build):

      """Used by wait handler to get module's build dependencies

  

+     :param session: SQLAlchemy session for database access.

      :param build: a module build.

      :type build: :class:`ModuleBuild`

      :return: the value returned from :meth:`get_module_build_dependencies`

          according to the configured resolver.

      :rtype: dict[str, Modulemd.Module]

      """

-     resolver = module_build_service.resolver.system_resolver

+     resolver = GenericResolver.create(session, conf)

      if conf.system in ["koji", "test"]:

          # For Koji backend, query for the module we are going to

          # build to get the koji_tag and deps from it.
@@ -310,7 +312,7 @@ 

          pass

  

      try:

-         build_deps = get_module_build_dependencies(build)

+         build_deps = get_module_build_dependencies(session, build)

      except ValueError:

          reason = "Failed to get module info from MBS. Max retries reached."

          log.exception(reason)
@@ -377,7 +379,9 @@ 

  

      component_build = models.ComponentBuild.from_component_name(session, artifact_name, build.id)

      further_work = []

-     srpm = builder.get_disttag_srpm(disttag=".%s" % get_rpm_release(build), module_build=build)

+     srpm = builder.get_disttag_srpm(session=session,

+                                     disttag=".%s" % get_rpm_release(build),

+                                     module_build=build)

      if not component_build:

          component_build = models.ComponentBuild(

              module_id=build.id,

@@ -108,6 +108,7 @@ 

          session, module_build)

  

      builder = module_build_service.builder.GenericBuilder.create(

+         session,

          module_build.owner,

          module_build,

          config.system,

@@ -410,6 +410,7 @@ 

      The ModuleBuild.rebuild_strategy is set to "all".

      The ModuleBuild.owner is set to "mbs_import".

  

+     :param session: SQLAlchemy session for database access.

      :param bool check_buildrequires: When True, checks that the buildrequires defined in the MMD

          have matching records in the `mmd["xmd"]["mbs"]["buildrequires"]` and also fills in

          the `ModuleBuild.buildrequires` according to this data.

@@ -22,12 +22,12 @@ 

  # Written by Ralph Bean <rbean@redhat.com>

  #            Matt Prahl <mprahl@redhat.com>

  #            Jan Kaluza <jkaluza@redhat.com>

- from module_build_service import log, models, Modulemd, db, conf

+ from module_build_service import log, models, Modulemd, conf

  from module_build_service.errors import StreamAmbigous

  from module_build_service.errors import UnprocessableEntity

  from module_build_service.mmd_resolver import MMDResolver

  from module_build_service.utils.general import deps_to_dict, mmd_to_str

- import module_build_service.resolver

+ from module_build_service.resolver import GenericResolver

  

  

  def _expand_mse_streams(session, name, streams, default_streams, raise_if_stream_ambigous):
@@ -128,6 +128,7 @@ 

  

  

  def _get_mmds_from_requires(

+     session,

      requires,

      mmds,

      recursive=False,
@@ -139,6 +140,7 @@ 

      Helper method for get_mmds_required_by_module_recursively returning

      the list of module metadata objects defined by `requires` dict.

  

+     :param session: SQLAlchemy session for database access.

      :param dict requires: requires or buildrequires in the form {module: [streams]}

      :param mmds: Dictionary with already handled name:streams as a keys and lists

          of resulting mmds as values.
@@ -158,7 +160,7 @@ 

      # To be able to call itself recursively, we need to store list of mmds

      # we have added to global mmds list in this particular call.

      added_mmds = {}

-     resolver = module_build_service.resolver.system_resolver

+     resolver = GenericResolver.create(session, conf)

  

      for name, streams in requires.items():

          # Base modules are already added to `mmds`.
@@ -188,7 +190,8 @@ 

              if base_module_mmds:

                  for base_module_mmd in base_module_mmds:

                      base_module_nsvc = base_module_mmd.get_nsvc()

-                     mmds[ns] += resolver.get_buildrequired_modulemds(name, stream, base_module_nsvc)

+                     mmds[ns] += resolver.get_buildrequired_modulemds(

+                         name, stream, base_module_nsvc)

              else:

                  mmds[ns] = resolver.get_module_modulemds(name, stream, strict=True)

              added_mmds[ns] += mmds[ns]
@@ -200,16 +203,17 @@ 

                  for deps in mmd.get_dependencies():

                      deps_dict = deps_to_dict(deps, 'runtime')

                      mmds = _get_mmds_from_requires(

-                         deps_dict, mmds, True, base_module_mmds=base_module_mmds)

+                         session, deps_dict, mmds, True, base_module_mmds=base_module_mmds)

  

      return mmds

  

  

- def _get_base_module_mmds(mmd):

+ def _get_base_module_mmds(session, mmd):

      """

      Returns list of MMDs of base modules buildrequired by `mmd` including the compatible

      old versions of the base module based on the stream version.

  

+     :param session: SQLAlchemy session for database access.

      :param Modulemd mmd: Input modulemd metadata.

      :rtype: list of Modulemd

      :return: List of MMDs of base modules buildrequired by `mmd`.
@@ -217,7 +221,7 @@ 

      seen = set()

      ret = []

  

-     resolver = module_build_service.resolver.system_resolver

+     resolver = GenericResolver.create(session, conf)

      for deps in mmd.get_dependencies():

          buildrequires = {

              module: deps.get_buildtime_streams(module)
@@ -269,7 +273,8 @@ 

                      stream_version_lte = False

  

                  mmds = resolver.get_module_modulemds(

-                     name, stream, stream_version_lte=stream_version_lte,

+                     name, stream,

+                     stream_version_lte=stream_version_lte,

                      virtual_streams=virtual_streams)

                  ret_chunk = []

                  # Add the returned mmds to the `seen` set to avoid querying those individually if
@@ -289,7 +294,7 @@ 

  

  

  def get_mmds_required_by_module_recursively(

-     mmd, default_streams=None, raise_if_stream_ambigous=False

+     session, mmd, default_streams=None, raise_if_stream_ambigous=False

  ):

      """

      Returns the list of Module metadata objects of all modules required while
@@ -304,6 +309,7 @@ 

      recursively all the "requires" and finds the latest version of each

      required module and also all contexts of these latest versions.

  

+     :param session: SQLAlchemy session for database access.

      :param dict default_streams: Dict in {module_name: module_stream, ...} format defining

          the default stream to choose for module in case when there are multiple streams to

          choose from.
@@ -321,7 +327,7 @@ 

      mmds = {}

  

      # Get the MMDs of all compatible base modules based on the buildrequires.

-     base_module_mmds = _get_base_module_mmds(mmd)

+     base_module_mmds = _get_base_module_mmds(session, mmd)

      if not base_module_mmds:

          base_module_choices = " or ".join(conf.base_module_names)

          raise UnprocessableEntity(
@@ -339,7 +345,8 @@ 

      for deps in mmd.get_dependencies():

          deps_dict = deps_to_dict(deps, 'buildtime')

          mmds = _get_mmds_from_requires(

-             deps_dict, mmds, False, default_streams, raise_if_stream_ambigous, base_module_mmds)

+             session, deps_dict, mmds, False, default_streams,

+             raise_if_stream_ambigous, base_module_mmds)

  

      # Now get the requires of buildrequires recursively.

      for mmd_key in list(mmds.keys()):
@@ -347,13 +354,8 @@ 

              for deps in mmd.get_dependencies():

                  deps_dict = deps_to_dict(deps, 'runtime')

                  mmds = _get_mmds_from_requires(

-                     deps_dict,

-                     mmds,

-                     True,

-                     default_streams,

-                     raise_if_stream_ambigous,

-                     base_module_mmds,

-                 )

+                     session, deps_dict, mmds, True, default_streams,

+                     raise_if_stream_ambigous, base_module_mmds)

  

      # Make single list from dict of lists.

      res = []
@@ -379,9 +381,6 @@ 

          the default stream to choose for module in case when there are multiple streams to

          choose from.

      """

-     if not session:

-         session = db.session

- 

      if not default_streams:

          default_streams = {}

  
@@ -399,7 +398,7 @@ 

      # and add them to MMDResolver.

      mmd_resolver = MMDResolver()

      mmds_for_resolving = get_mmds_required_by_module_recursively(

-         current_mmd, default_streams, raise_if_stream_ambigous)

+         session, current_mmd, default_streams, raise_if_stream_ambigous)

      for m in mmds_for_resolving:

          mmd_resolver.add_modules(m)

  
@@ -512,7 +511,7 @@ 

          # Resolve the buildrequires and store the result in XMD.

          if "mbs" not in xmd:

              xmd["mbs"] = {}

-         resolver = module_build_service.resolver.system_resolver

+         resolver = GenericResolver.create(session, conf)

          xmd["mbs"]["buildrequires"] = resolver.resolve_requires(br_list)

          xmd["mbs"]["mse"] = True

  

@@ -68,7 +68,7 @@ 

              session.add(build)

  

  

- def record_filtered_rpms(mmd):

+ def record_filtered_rpms(session, mmd):

      """Record filtered RPMs that should not be installed into buildroot

  

      These RPMs are filtered:
@@ -84,7 +84,7 @@ 

      from module_build_service.builder import GenericBuilder

      from module_build_service.resolver import GenericResolver

  

-     resolver = GenericResolver.create(conf)

+     resolver = GenericResolver.create(session, conf)

      builder = GenericBuilder.backends[conf.system]

  

      new_buildrequires = {}
@@ -105,7 +105,7 @@ 

          filtered_rpms = []

          rpm_filter = req_mmd.get_rpm_filters()

          if rpm_filter:

-             built_nvrs = builder.get_built_rpms_in_module_build(req_mmd)

+             built_nvrs = builder.get_built_rpms_in_module_build(session, req_mmd)

              for nvr in built_nvrs:

                  parsed_nvr = kobo.rpmlib.parse_nvr(nvr)

                  if parsed_nvr["name"] in rpm_filter:
@@ -539,7 +539,8 @@ 

      return batch

  

  

- def submit_module_build_from_yaml(username, handle, params, stream=None, skiptests=False):

+ def submit_module_build_from_yaml(session, username, handle, params,

+                                   stream=None, skiptests=False):

      yaml_file = to_text_type(handle.read())

      mmd = load_mmd(yaml_file)

      dt = datetime.utcfromtimestamp(int(time.time()))
@@ -562,13 +563,13 @@ 

          macros = buildopts.get_rpm_macros() or ""

          buildopts.set_rpm_macros(macros + "\n\n%__spec_check_pre exit 0\n")

          mmd.set_buildopts(buildopts)

-     return submit_module_build(username, mmd, params)

+     return submit_module_build(session, username, mmd, params)

  

  

  _url_check_re = re.compile(r"^[^:/]+:.*$")

  

  

- def submit_module_build_from_scm(username, params, allow_local_url=False):

+ def submit_module_build_from_scm(session, username, params, allow_local_url=False):

      url = params["scmurl"]

      branch = params["branch"]

      # Translate local paths into file:// URL
@@ -578,7 +579,7 @@ 

          url = "file://" + url

      mmd, scm = _fetch_mmd(url, branch, allow_local_url)

  

-     return submit_module_build(username, mmd, params)

+     return submit_module_build(session, username, mmd, params)

  

  

  def _apply_dep_overrides(mmd, params):
@@ -685,7 +686,7 @@ 

              )

  

  

- def _modify_buildtime_streams(mmd, new_streams_func):

+ def _modify_buildtime_streams(session, mmd, new_streams_func):

      """

      Modify buildtime streams using the input new_streams_func.

  
@@ -702,7 +703,7 @@ 

          new_dep = Modulemd.Dependencies()

  

          for name, streams in brs.items():

-             new_streams = new_streams_func(name, streams)

+             new_streams = new_streams_func(session, name, streams)

              if streams != new_streams:

                  overridden = True

  
@@ -726,16 +727,18 @@ 

              mmd.add_dependencies(new_dep)

  

  

- def resolve_base_module_virtual_streams(name, streams):

+ def resolve_base_module_virtual_streams(db_session, name, streams):

      """

      Resolve any base module virtual streams and return a copy of `streams` with the resolved values.

  

+     :param db_session: SQLAlchemy session for database access.

      :param str name: the module name

      :param str streams: the streams to resolve

      :return: the resolved streams

      :rtype: list

      """

-     from module_build_service.resolver import system_resolver

+     from module_build_service.resolver import GenericResolver

+     resolver = GenericResolver.create(db_session, conf)

  

      if name not in conf.base_module_names:

          return streams
@@ -749,7 +752,7 @@ 

  

          # Check if the base module stream is available

          log.debug('Checking to see if the base module "%s:%s" is available', name, stream)

-         if system_resolver.get_module_count(name=name, stream=stream) > 0:

+         if resolver.get_module_count(name=name, stream=stream) > 0:

              continue

  

          # If the base module stream is not available, check if there's a virtual stream
@@ -757,9 +760,8 @@ 

              'Checking to see if there is a base module "%s" with the virtual stream "%s"',

              name, stream,

          )

-         base_module_mmd = system_resolver.get_latest_with_virtual_stream(

-             name=name, virtual_stream=stream

-         )

+         base_module_mmd = resolver.get_latest_with_virtual_stream(

+             name=name, virtual_stream=stream)

          if not base_module_mmd:

              # If there isn't this base module stream or virtual stream available, skip it,

              # and let the dep solving code deal with it like it normally would
@@ -780,7 +782,7 @@ 

      return new_streams

  

  

- def _process_support_streams(mmd, params):

+ def _process_support_streams(session, mmd, params):

      """

      Check if any buildrequired base modules require a support stream suffix.

  
@@ -803,7 +805,7 @@ 

  

      buildrequire_overrides = params.get("buildrequire_overrides", {})

  

-     def new_streams_func(name, streams):

+     def new_streams_func(db_session, name, streams):

          if name not in conf.base_module_names:

              log.debug("The module %s is not a base module. Skipping the release date check.", name)

              return streams
@@ -896,13 +898,14 @@ 

  

          return new_streams

  

-     _modify_buildtime_streams(mmd, new_streams_func)

+     _modify_buildtime_streams(session, mmd, new_streams_func)

  

  

- def submit_module_build(username, mmd, params):

+ def submit_module_build(session, username, mmd, params):

      """

      Submits new module build.

  

+     :param session: a SQLAlchemy session for database access.

      :param str username: Username of the build's owner.

      :param Modulemd.ModuleStream mmd: Modulemd defining the build.

      :param dict params: the API parameters passed in by the user
@@ -931,10 +934,10 @@ 

      if "default_streams" in params:

          default_streams = params["default_streams"]

      _apply_dep_overrides(mmd, params)

-     _modify_buildtime_streams(mmd, resolve_base_module_virtual_streams)

-     _process_support_streams(mmd, params)

+     _modify_buildtime_streams(session, mmd, resolve_base_module_virtual_streams)

+     _process_support_streams(session, mmd, params)

  

-     mmds = generate_expanded_mmds(db.session, mmd, raise_if_stream_ambigous, default_streams)

+     mmds = generate_expanded_mmds(session, mmd, raise_if_stream_ambigous, default_streams)

      if not mmds:

          raise ValidationError(

              "No dependency combination was satisfied. Please verify the "
@@ -954,7 +957,7 @@ 

          nsvc = mmd.get_nsvc()

  

          log.debug("Checking whether module build already exists: %s.", nsvc)

-         module = models.ModuleBuild.get_build_from_nsvc(db.session, *nsvc.split(":"))

+         module = models.ModuleBuild.get_build_from_nsvc(session, *nsvc.split(":"))

          if module and not params.get("scratch", False):

              if module.state != models.BUILD_STATES["failed"]:

                  log.info(
@@ -993,7 +996,7 @@ 

              if params.get("scratch", False):

                  log.debug("Checking for existing scratch module builds by NSVC")

                  scrmods = models.ModuleBuild.get_scratch_builds_from_nsvc(

-                     db.session, *nsvc.split(":"))

+                     session, *nsvc.split(":"))

                  scrmod_contexts = [scrmod.context for scrmod in scrmods]

                  log.debug(

                      "Found %d previous scratch module build context(s): %s",
@@ -1005,7 +1008,7 @@ 

  

              log.debug("Creating new module build")

              module = models.ModuleBuild.create(

-                 db.session,

+                 session,

                  conf,

                  name=mmd.get_module_name(),

                  stream=mmd.get_stream_name(),
@@ -1026,8 +1029,8 @@ 

              module.context += context_suffix

  

          all_modules_skipped = False

-         db.session.add(module)

-         db.session.commit()

+         session.add(module)

+         session.commit()

          modules.append(module)

          log.info('The user "%s" submitted the build "%s"', username, nsvc)

  

@@ -24,7 +24,6 @@ 

  import re

  

  from module_build_service import conf, log

- from module_build_service.resolver import system_resolver

  

  

  """
@@ -107,7 +106,7 @@ 

      ]

  

  

- def get_modulemds_from_ursine_content(tag):

+ def get_modulemds_from_ursine_content(session, tag):

      """Get all modules metadata which were added to ursine content

  

      Ursine content is the tag inheritance managed by Ursa-Major by adding
@@ -131,6 +130,9 @@ 

      """

      from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  

+     from module_build_service.resolver import GenericResolver

+     resolver = GenericResolver.create(session, conf)

+ 

      koji_session = KojiModuleBuilder.get_session(conf, login=False)

      repos = koji_session.getExternalRepoList(tag)

      build_tags = find_build_tags_from_external_repos(koji_session, repos)
@@ -141,7 +143,7 @@ 

      for tag in build_tags:

          koji_tags = find_module_koji_tags(koji_session, tag)

          for koji_tag in koji_tags:

-             md = system_resolver.get_modulemd_by_koji_tag(koji_tag)

+             md = resolver.get_modulemd_by_koji_tag(koji_tag)

              if md:

                  modulemds.append(md)

              else:
@@ -149,7 +151,7 @@ 

      return modulemds

  

  

- def find_stream_collision_modules(buildrequired_modules, koji_tag):

+ def find_stream_collision_modules(session, buildrequired_modules, koji_tag):

      """

      Find buildrequired modules that are part of the ursine content represented

      by the koji_tag but with a different stream.
@@ -164,7 +166,7 @@ 

          found, an empty list is returned.

      :rtype: list[str]

      """

-     ursine_modulemds = get_modulemds_from_ursine_content(koji_tag)

+     ursine_modulemds = get_modulemds_from_ursine_content(session, koji_tag)

      if not ursine_modulemds:

          log.debug("No module metadata is found from ursine content.")

          return []
@@ -193,7 +195,7 @@ 

      return collision_modules

  

  

- def handle_stream_collision_modules(mmd):

+ def handle_stream_collision_modules(session, mmd):

      """

      Find modules from ursine content and record those that are buildrequired

      modules but have a different stream. Finally, record the built RPMs of these
@@ -243,13 +245,14 @@ 

              )

              continue

  

-         modules_nsvc = find_stream_collision_modules(buildrequires, base_module_info["koji_tag"])

+         modules_nsvc = find_stream_collision_modules(

+             session, buildrequires, base_module_info["koji_tag"])

  

          if modules_nsvc:

              # Save modules NSVC for later use in subsequent event handlers to

              # log readable messages.

              base_module_info["stream_collision_modules"] = modules_nsvc

-             base_module_info["ursine_rpms"] = find_module_built_rpms(modules_nsvc)

+             base_module_info["ursine_rpms"] = find_module_built_rpms(session, modules_nsvc)

          else:

              log.info("No stream collision module is found against base module %s.", module_name)

              # Always set in order to mark it as handled already.
@@ -259,7 +262,7 @@ 

      mmd.set_xmd(xmd)

  

  

- def find_module_built_rpms(modules_nsvc):

+ def find_module_built_rpms(session, modules_nsvc):

      """Find out built RPMs of given modules

  

      :param modules_nsvc: a list of modules' NSVC to find out built RPMs for
@@ -272,7 +275,7 @@ 

      from module_build_service.resolver import GenericResolver

      from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  

-     resolver = GenericResolver.create(conf)

+     resolver = GenericResolver.create(session, conf)

  

      built_rpms = []

      koji_session = KojiModuleBuilder.get_session(conf, login=False)

@@ -407,7 +407,8 @@ 

              self.validate_optional_params()

  

      def post(self):

-         return submit_module_build_from_scm(self.username, self.data, allow_local_url=False)

+         return submit_module_build_from_scm(

+             db.session, self.username, self.data, allow_local_url=False)

  

  

  class YAMLFileHandler(BaseHandler):
@@ -432,7 +433,7 @@ 

                  handle.filename = self.data["module_name"]

          else:

              handle = request.files["yaml"]

-         return submit_module_build_from_yaml(self.username, handle, self.data)

+         return submit_module_build_from_yaml(db.session, self.username, handle, self.data)

  

  

  def _dict_from_request(request):

file modified
+44 -23
@@ -118,7 +118,8 @@ 

      on_buildroot_add_repos_cb = None

  

      @module_build_service.utils.validate_koji_tag("tag_name")

-     def __init__(self, owner, module, config, tag_name, components):

+     def __init__(self, db_session, owner, module, config, tag_name, components):

+         self.db_session = db_session

          self.module_str = module

          self.tag_name = tag_name

          self.config = config
@@ -286,9 +287,9 @@ 

          return build_id, koji.BUILD_STATES["BUILDING"], reason, None

  

      @staticmethod

-     def get_disttag_srpm(disttag, module_build):

+     def get_disttag_srpm(session, disttag, module_build):

          # @FIXME

-         return KojiModuleBuilder.get_disttag_srpm(disttag, module_build)

+         return KojiModuleBuilder.get_disttag_srpm(session, disttag, module_build)

  

      def cancel_build(self, task_id):

          if FakeModuleBuilder.on_cancel_cb:
@@ -465,7 +466,8 @@ 

          FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

  

          msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main(msgs, stop)

  

          # All components should be built and module itself should be in "done"
@@ -517,7 +519,8 @@ 

          data = json.loads(rv.data)

          module_build_id = data["id"]

          msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main(msgs, stop)

  

          module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()
@@ -612,7 +615,8 @@ 

              data = json.loads(rv.data)

              assert data["id"] == 2

          msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main(msgs, stop)

          assert models.ModuleBuild.query.first().state == models.BUILD_STATES["ready"]

  
@@ -662,7 +666,8 @@ 

          FakeModuleBuilder.on_finalize_cb = on_finalize_cb

  

          msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main(msgs, stop)

  

          # Because we did not finished single component build and canceled the
@@ -704,7 +709,8 @@ 

          FakeModuleBuilder.INSTANT_COMPLETE = True

  

          msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main(msgs, stop)

  

          # All components should be built and module itself should be in "done"
@@ -750,7 +756,8 @@ 

              Stop the scheduler when the module is built or when we try to build

              more components than the num_concurrent_builds.

              """

-             main_stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+             with models.make_session(conf) as session:

+                 main_stop = module_build_service.scheduler.make_simple_stop_condition(session)

              build_count = (

                  db.session.query(models.ComponentBuild).filter_by(

                      state=koji.BUILD_STATES["BUILDING"]
@@ -812,7 +819,8 @@ 

              Stop the scheduler when the module is built or when we try to build

              more components than the num_concurrent_builds.

              """

-             main_stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+             with models.make_session(conf) as session:

+                 main_stop = module_build_service.scheduler.make_simple_stop_condition(session)

              num_building = (

                  db.session.query(models.ComponentBuild)

                  .filter_by(state=koji.BUILD_STATES["BUILDING"])
@@ -885,7 +893,8 @@ 

          FakeModuleBuilder.on_tag_artifacts_cb = on_tag_artifacts_cb

  

          msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main(msgs, stop)

  

          for c in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():
@@ -946,7 +955,8 @@ 

          FakeModuleBuilder.on_build_cb = on_build_cb

  

          msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main(msgs, stop)

  

          for c in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():
@@ -1011,7 +1021,8 @@ 

          FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

  

          msgs = [MBSModule("local module build", 3, 1)]

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main(msgs, stop)

  

          reused_component_ids = {
@@ -1081,7 +1092,8 @@ 

          FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

  

          msgs = [MBSModule("local module build", 3, 1)]

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main(msgs, stop)

  

          # All components should be built and module itself should be in "done"
@@ -1219,7 +1231,8 @@ 

  

          # Run the backend

          msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main(msgs, stop)

  

          # All components should be built and module itself should be in "done"
@@ -1344,7 +1357,8 @@ 

  

          # Run the backend

          msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main(msgs, stop)

  

          # All components should be built and module itself should be in "done"
@@ -1366,7 +1380,8 @@ 

          """

          FakeSCM(

              mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

  

          with patch("module_build_service.utils.submit.format_mmd") as mock_format_mmd:

              mock_format_mmd.side_effect = Forbidden("Custom component repositories aren't allowed.")
@@ -1449,7 +1464,8 @@ 

          )

          assert rv.status_code == 201

          # Run the backend

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main([], stop)

          # Post again and make sure it fails

          rv2 = self.client.post(
@@ -1503,7 +1519,8 @@ 

          # make sure normal build has expected context without a suffix

          assert module_build.context == "9c690d0e"

          # Run the backend

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main([], stop)

          # Post again as a scratch build and make sure it succeeds

          post_data["scratch"] = True
@@ -1548,7 +1565,8 @@ 

          # make sure scratch build has expected context with unique suffix

          assert module_build.context == "9c690d0e_1"

          # Run the backend

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main([], stop)

          # Post again as a non-scratch build and make sure it succeeds

          post_data["scratch"] = False
@@ -1592,7 +1610,8 @@ 

          # make sure first scratch build has expected context with unique suffix

          assert module_build.context == "9c690d0e_1"

          # Run the backend

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main([], stop)

          # Post scratch build again and make sure it succeeds

          rv2 = self.client.post(post_url, data=json.dumps(post_data))
@@ -1708,7 +1727,8 @@ 

              assert set(dependencies.keys()) == set(["module-f28-build"])

  

          FakeModuleBuilder.on_buildroot_add_repos_cb = on_buildroot_add_repos_cb

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+         with models.make_session(conf) as session:

+             stop = module_build_service.scheduler.make_simple_stop_condition(session)

          module_build_service.scheduler.main([], stop)

  

          module = db.session.query(models.ModuleBuild).get(module_build_id)
@@ -1775,7 +1795,8 @@ 

              FakeModuleBuilder.DEFAULT_GROUPS = {"srpm-build": set(["bar"]), "build": set(["foo"])}

  

              msgs = []

-             stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+             with models.make_session(conf) as session:

+                 stop = module_build_service.scheduler.make_simple_stop_condition(session)

              module_build_service.scheduler.main(msgs, stop)

  

              # All components should be built and module itself should be in "done"

file modified
+19 -17
@@ -38,7 +38,7 @@ 

          self.module = module_build_service.models.ModuleBuild.query.filter_by(id=1).one()

  

      @patch("module_build_service.resolver.DBResolver")

-     @patch("module_build_service.resolver.GenericResolver")

+     @patch("module_build_service.builder.base.GenericResolver")

      def test_default_buildroot_groups_cache(self, generic_resolver, resolver):

          mbs_groups = {"buildroot": [], "srpm-buildroot": []}

  
@@ -48,27 +48,29 @@ 

  

          expected_groups = {"build": [], "srpm-build": []}

  

-         with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-             # Call default_buildroot_groups, the result should be cached.

-             ret = GenericBuilder.default_buildroot_groups(db.session, self.module)

-             assert ret == expected_groups

-             resolver.resolve_profiles.assert_called_once()

-             resolver.resolve_profiles.reset_mock()

+         generic_resolver.create.return_value = resolver

+ 

+         # Call default_buildroot_groups, the result should be cached.

+         with module_build_service.models.make_session(module_build_service.conf) as session:

+             ret = GenericBuilder.default_buildroot_groups(session, self.module)

+         assert ret == expected_groups

+         resolver.resolve_profiles.assert_called_once()

+         resolver.resolve_profiles.reset_mock()

  

          # Now try calling it again to verify resolve_profiles is not called,

          # because it is cached.

-         with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-             ret = GenericBuilder.default_buildroot_groups(db.session, self.module)

-             assert ret == expected_groups

-             resolver.resolve_profiles.assert_not_called()

-             resolver.resolve_profiles.reset_mock()

+         with module_build_service.models.make_session(module_build_service.conf) as session:

+             ret = GenericBuilder.default_buildroot_groups(session, self.module)

+         assert ret == expected_groups

+         resolver.resolve_profiles.assert_not_called()

+         resolver.resolve_profiles.reset_mock()

  

          # And now try clearing the cache and call it again.

-         with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-             GenericBuilder.clear_cache(self.module)

-             ret = GenericBuilder.default_buildroot_groups(db.session, self.module)

-             assert ret == expected_groups

-             resolver.resolve_profiles.assert_called_once()

+         GenericBuilder.clear_cache(self.module)

+         with module_build_service.models.make_session(module_build_service.conf) as session:

+             ret = GenericBuilder.default_buildroot_groups(session, self.module)

+         assert ret == expected_groups

+         resolver.resolve_profiles.assert_called_once()

  

      def test_get_build_weights(self):

          weights = GenericBuilder.get_build_weights(["httpd", "apr"])

file modified
+101 -76
@@ -33,7 +33,8 @@ 

  import module_build_service.scheduler.handlers.repos

  import module_build_service.models

  import module_build_service.builder

- from module_build_service import Modulemd, db

+ from module_build_service import Modulemd, conf, db

+ from module_build_service.models import make_session

  from module_build_service.utils.general import mmd_to_str

  

  import pytest
@@ -139,13 +140,15 @@ 

      def test_recover_orphaned_artifact_when_tagged(self):

          """ Test recover_orphaned_artifact when the artifact is found and tagged in both tags

          """

-         builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

-             config=conf,

-             tag_name="module-foo",

-             components=[],

-         )

+         with make_session(conf) as db_session:

+             builder = FakeKojiModuleBuilder(

+                 db_session=db_session,

+                 owner=self.module.owner,

+                 module=self.module,

+                 config=conf,

+                 tag_name="module-foo",

+                 components=[],

+             )

  

          builder.module_tag = {"name": "module-foo", "id": 1}

          builder.module_build_tag = {"name": "module-foo-build", "id": 2}
@@ -184,13 +187,15 @@ 

      def test_recover_orphaned_artifact_when_untagged(self):

          """ Tests recover_orphaned_artifact when the build is found but untagged

          """

-         builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

-             config=conf,

-             tag_name="module-foo",

-             components=[],

-         )

+         with make_session(conf) as db_session:

+             builder = FakeKojiModuleBuilder(

+                 db_session=db_session,

+                 owner=self.module.owner,

+                 module=self.module,

+                 config=conf,

+                 tag_name="module-foo",

+                 components=[],

+             )

  

          builder.module_tag = {"name": "module-foo", "id": 1}

          builder.module_build_tag = {"name": "module-foo-build", "id": 2}
@@ -227,13 +232,15 @@ 

      def test_recover_orphaned_artifact_when_nothing_exists(self):

          """ Test recover_orphaned_artifact when the build is not found

          """

-         builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

-             config=conf,

-             tag_name="module-foo",

-             components=[],

-         )

+         with make_session(conf) as db_session:

+             builder = FakeKojiModuleBuilder(

+                 db_session=db_session,

+                 owner=self.module.owner,

+                 module=self.module,

+                 config=conf,

+                 tag_name="module-foo",

+                 components=[],

+             )

  

          builder.module_tag = {"name": "module-foo", "id": 1}

          builder.module_build_tag = {"name": "module-foo-build", "id": 2}
@@ -259,13 +266,15 @@ 

  

          attrs = {"checkForBuilds.return_value": None, "checkForBuilds.side_effect": IOError}

          mocked_kojiutil.configure_mock(**attrs)

-         fake_kmb = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

-             config=conf,

-             tag_name="module-nginx-1.2",

-             components=[],

-         )

+         with make_session(conf) as db_session:

+             fake_kmb = FakeKojiModuleBuilder(

+                 db_session=db_session,

+                 owner=self.module.owner,

+                 module=self.module,

+                 config=conf,

+                 tag_name="module-nginx-1.2",

+                 components=[],

+             )

          fake_kmb.module_target = {"build_tag": "module-fake_tag"}

  

          with pytest.raises(IOError):
@@ -285,13 +294,15 @@ 

              mmd.set_xmd(xmd)

              self.module.modulemd = mmd_to_str(mmd)

  

-         builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

-             config=conf,

-             tag_name="module-nginx-1.2",

-             components=[],

-         )

+         with make_session(conf) as db_session:

+             builder = FakeKojiModuleBuilder(

+                 db_session=db_session,

+                 owner=self.module.owner,

+                 module=self.module,

+                 config=conf,

+                 tag_name="module-nginx-1.2",

+                 components=[],

+             )

  

          builder.module_tag = {"name": "module-foo", "id": 1}

          builder.module_build_tag = {"name": "module-foo-build", "id": 2}
@@ -339,13 +350,15 @@ 

          ]

          mock_get_session.return_value = mock_session

          mock_get_tagged_nvrs.side_effect = [["foo", "bar"], ["foo"]]

-         builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

-             config=conf,

-             tag_name="module-foo",

-             components=[],

-         )

+         with make_session(conf) as db_session:

+             builder = FakeKojiModuleBuilder(

+                 db_session=db_session,

+                 owner=self.module.owner,

+                 module=self.module,

+                 config=conf,

+                 tag_name="module-foo",

+                 components=[],

+             )

  

          builder.untag_artifacts(["foo", "bar"])

          assert mock_session.untagBuild.call_count == 3
@@ -507,13 +520,15 @@ 

  

          self.module.arches.append(module_build_service.models.ModuleArch(name="i686"))

  

-         builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

-             config=conf,

-             tag_name="module-foo",

-             components=["nginx"],

-         )

+         with make_session(conf) as db_session:

+             builder = FakeKojiModuleBuilder(

+                 db_session=db_session,

+                 owner=self.module.owner,

+                 module=self.module,

+                 config=conf,

+                 tag_name="module-foo",

+                 components=["nginx"],

+             )

          session = builder.koji_session

  

          groups = OrderedDict()
@@ -585,13 +600,15 @@ 

              mmd.set_xmd(xmd)

              self.module.modulemd = mmd_to_str(mmd)

  

-         builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

-             config=conf,

-             tag_name="module-foo",

-             components=["nginx"],

-         )

+         with make_session(conf) as db_session:

+             builder = FakeKojiModuleBuilder(

+                 db_session=db_session,

+                 owner=self.module.owner,

+                 module=self.module,

+                 config=conf,

+                 tag_name="module-foo",

+                 components=["nginx"],

+             )

          session = builder.koji_session

          FakeKojiModuleBuilder.tags = {}

  
@@ -614,13 +631,15 @@ 

          if scratch:

              self.module.scratch = scratch

  

-         builder = FakeKojiModuleBuilder(

-             owner=self.module.owner,

-             module=self.module,

-             config=conf,

-             tag_name="module-foo",

-             components=["nginx"],

-         )

+         with make_session(conf) as db_session:

+             builder = FakeKojiModuleBuilder(

+                 db_session=db_session,

+                 owner=self.module.owner,

+                 module=self.module,

+                 config=conf,

+                 tag_name="module-foo",

+                 components=["nginx"],

+             )

          session = builder.koji_session

          session.getBuildTarget = MagicMock()

          session.getBuildTarget.return_value = {}
@@ -689,7 +708,7 @@ 

          self.module.context = mmd.get_context()

          db.session.commit()

  

-         ret = KojiModuleBuilder.get_built_rpms_in_module_build(mmd)

+         ret = KojiModuleBuilder.get_built_rpms_in_module_build(db.session, mmd)

          assert set(ret) == set(["bar-2:1.30-4.el8+1308+551bfa71", "tar-2:1.30-4.el8+1308+551bfa71"])

          session.assert_not_called()

  
@@ -775,7 +794,9 @@ 

          )

          reuse_component_init_data()

          current_module = module_build_service.models.ModuleBuild.query.get(3)

-         rv = KojiModuleBuilder._get_filtered_rpms_on_self_dep(current_module, br_filtered_rpms)

+         with module_build_service.models.make_session(conf) as db_session:

+             rv = KojiModuleBuilder._get_filtered_rpms_on_self_dep(

+                 db_session, current_module, br_filtered_rpms)

          assert set(rv) == set(expected)

          session.assert_not_called()

  
@@ -795,14 +816,16 @@ 

                  new_callable=mock.PropertyMock,

                  return_value=cg_devel_enabled,

              ):

-                 builder = FakeKojiModuleBuilder(

-                     owner=self.module.owner,

-                     module=self.module,

-                     config=conf,

-                     tag_name="module-nginx-1.2",

-                     components=[],

-                 )

-                 builder.finalize()

+                 with make_session(conf) as session:

+                     builder = FakeKojiModuleBuilder(

+                         db_session=session,

+                         owner=self.module.owner,

+                         module=self.module,

+                         config=conf,

+                         tag_name="module-nginx-1.2",

+                         components=[],

+                     )

+                     builder.finalize()

  

          mock_koji_cg = mock_koji_cg_cls.return_value

          if cg_enabled:
@@ -824,7 +847,8 @@ 

      @patch.dict("sys.modules", krbV=MagicMock())

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_ensure_builder_use_a_logged_in_koji_session(self, ClientSession):

-         builder = KojiModuleBuilder("owner", self.module, conf, "module-tag", [])

+         with make_session(conf) as session:

+             builder = KojiModuleBuilder(session, "owner", self.module, conf, "module-tag", [])

          builder.koji_session.krb_login.assert_called_once()

  

      @patch.dict("sys.modules", krbV=MagicMock())
@@ -889,7 +913,8 @@ 

      @patch("module_build_service.builder.KojiModuleBuilder.execute_cmd")

      def _build_srpm(self, execute_cmd, mkdtemp):

          mkdtemp.return_value = self.tmp_srpm_build_dir

-         return KojiModuleBuilder.get_disttag_srpm("disttag", self.module_build)

+         with module_build_service.models.make_session(conf) as session:

+             return KojiModuleBuilder.get_disttag_srpm(session, "disttag", self.module_build)

  

      def test_return_srpm_file(self):

          srpm_file = self._build_srpm()

@@ -117,7 +117,7 @@ 

              module = self._create_module_with_filters(session, 3, koji.BUILD_STATES["COMPLETE"])

  

              builder = MockModuleBuilder(

-                 "mcurlej", module, conf, module.koji_tag, module.component_builds

+                 session, "mcurlej", module, conf, module.koji_tag, module.component_builds

              )

              builder.resultsdir = self.resultdir

              rpms = [
@@ -145,7 +145,7 @@ 

              module = self._create_module_with_filters(session, 2, koji.BUILD_STATES["COMPLETE"])

  

              builder = MockModuleBuilder(

-                 "mcurlej", module, conf, module.koji_tag, module.component_builds

+                 session, "mcurlej", module, conf, module.koji_tag, module.component_builds

              )

              builder.resultsdir = self.resultdir

              rpms = [
@@ -171,7 +171,7 @@ 

              module = self._create_module_with_filters(session, 3, koji.BUILD_STATES["COMPLETE"])

  

              builder = MockModuleBuilder(

-                 "mcurlej", module, conf, module.koji_tag, module.component_builds)

+                 session, "mcurlej", module, conf, module.koji_tag, module.component_builds)

              builder.resultsdir = self.resultdir

              rpms = []

              with mock.patch("os.listdir", return_value=rpms):
@@ -213,7 +213,8 @@ 

              mock.mock_open(read_data="[fake]\nrepofile 3\n").return_value,

          ]

  

-         builder = MockModuleBuilder("user", self.app, conf, "module-app", [])

+         with make_session(conf) as session:

+             builder = MockModuleBuilder(session, "user", self.app, conf, "module-app", [])

  

          dependencies = {

              "repofile://": [self.platform.mmd()],

file modified
+10 -3
@@ -18,7 +18,7 @@ 

  # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE

  # SOFTWARE.

  import pytest

- from mock import patch, mock_open, ANY

+ from mock import patch, mock_open, ANY, Mock

  

  from module_build_service import conf

  from module_build_service.manage import retire, build_module_locally
@@ -128,11 +128,18 @@ 

      @patch("module_build_service.manage.submit_module_build_from_yaml")

      @patch("module_build_service.scheduler.main")

      @patch("module_build_service.manage.conf.set_item")

+     @patch("module_build_service.models.make_session")

      def test_build_module_locally_set_stream(

-             self, conf_set_item, main, submit_module_build_from_yaml, patched_open):

+             self, make_session, conf_set_item, main,

+             submit_module_build_from_yaml, patched_open):

+         mock_session = Mock()

+         make_session.return_value.__enter__.return_value = mock_session

+ 

          build_module_locally(

              yaml_file="./fake.yaml", default_streams=["platform:el8"], stream="foo")

  

          submit_module_build_from_yaml.assert_called_once_with(

-             ANY, ANY, {"default_streams": {"platform": "el8"}, "local_build": True},

+             mock_session, ANY, ANY, {

+                 "default_streams": {"platform": "el8"}, "local_build": True

+             },

              skiptests=False, stream="foo")

file modified
+47 -38
@@ -27,7 +27,7 @@ 

  import pytest

  

  import module_build_service.resolver as mbs_resolver

- from module_build_service import app, db, models, utils, Modulemd

+ from module_build_service import db, models, utils, Modulemd, conf

  from module_build_service.utils import import_mmd, load_mmd_file, mmd_to_str

  from module_build_service.models import ModuleBuild

  import tests
@@ -67,18 +67,20 @@ 

          db.session.add(build)

          db.session.commit()

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

-         result = resolver.get_buildrequired_modulemds(

-             "testmodule", "master", platform_f300103.mmd().get_nsvc())

+         with models.make_session(conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="db")

+             result = resolver.get_buildrequired_modulemds(

+                 "testmodule", "master", platform_f300103.mmd().get_nsvc())

          nsvcs = set([m.get_nsvc() for m in result])

          assert nsvcs == set(["testmodule:master:20170109091357:123"])

  

      @pytest.mark.parametrize("stream_versions", [False, True])

      def test_get_module_modulemds_stream_versions(self, stream_versions):

          tests.init_data(1, multiple_stream_versions=True)

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

-         result = resolver.get_module_modulemds(

-             "platform", "f29.1.0", stream_version_lte=stream_versions)

+         with models.make_session(conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="db")

+             result = resolver.get_module_modulemds(

+                 "platform", "f29.1.0", stream_version_lte=stream_versions)

          nsvcs = set([mmd.get_nsvc() for mmd in result])

          if stream_versions:

              assert nsvcs == set(["platform:f29.1.0:3:00000000", "platform:f29.0.0:3:00000000"])
@@ -104,9 +106,10 @@ 

              module.modulemd = mmd_to_str(mmd)

              db.session.add(module)

              db.session.commit()

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

-         result = resolver.get_module_build_dependencies(

-             "testmodule", "master", "20170109091357", "78e4a6fd").keys()

+         with models.make_session(conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="db")

+             result = resolver.get_module_build_dependencies(

+                 "testmodule", "master", "20170109091357", "78e4a6fd").keys()

          assert set(result) == expected

  

      def test_get_module_build_dependencies_recursive(self):
@@ -135,9 +138,10 @@ 

          module.version = str(mmd.get_version())

          module.koji_tag = "module-ae2adf69caf0e1b6"

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

-         result = resolver.get_module_build_dependencies(

-             "testmodule2", "master", "20180123171545", "c40c156c").keys()

+         with models.make_session(conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="db")

+             result = resolver.get_module_build_dependencies(

+                 "testmodule2", "master", "20180123171545", "c40c156c").keys()

          assert set(result) == set(["module-f28-build"])

  

      @patch(
@@ -152,24 +156,25 @@ 

          """

          Tests that it returns the requires of the buildrequires recursively

          """

-         with app.app_context():

-             utils.load_local_builds(["platform", "parent", "child", "testmodule"])

+         utils.load_local_builds(["platform", "parent", "child", "testmodule"])

  

-             build = models.ModuleBuild.local_modules(db.session, "child", "master")

-             resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

+         build = models.ModuleBuild.local_modules(db.session, "child", "master")

+         with models.make_session(conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="db")

              result = resolver.get_module_build_dependencies(mmd=build[0].mmd()).keys()

  

-             local_path = os.path.join(base_dir, "staged_data", "local_builds")

+         local_path = os.path.join(base_dir, "staged_data", "local_builds")

  

-             expected = [os.path.join(local_path, "module-parent-master-20170816080815/results")]

-             assert set(result) == set(expected)

+         expected = [os.path.join(local_path, "module-parent-master-20170816080815/results")]

+         assert set(result) == set(expected)

  

      def test_resolve_requires(self):

          build = models.ModuleBuild.query.get(2)

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

-         result = resolver.resolve_requires(

-             [":".join([build.name, build.stream, build.version, build.context])]

-         )

+         with models.make_session(conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="db")

+             result = resolver.resolve_requires(

+                 [":".join([build.name, build.stream, build.version, build.context])]

+             )

  

          assert result == {

              "testmodule": {
@@ -186,8 +191,9 @@ 

          Tests that the profiles get resolved recursively

          """

          mmd = models.ModuleBuild.query.get(2).mmd()

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

-         result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))

+         with models.make_session(conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="db")

+             result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))

          expected = {

              "buildroot": set([

                  "unzip",
@@ -239,27 +245,30 @@ 

          """

          Test that profiles get resolved recursively on local builds

          """

-         with app.app_context():

-             utils.load_local_builds(["platform"])

-             mmd = models.ModuleBuild.query.get(2).mmd()

-             resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+         utils.load_local_builds(["platform"])

+         mmd = models.ModuleBuild.query.get(2).mmd()

+         with models.make_session(conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="mbs")

              result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))

-             expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])}

-             assert result == expected

+         expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])}

+         assert result == expected

  

      def test_get_latest_with_virtual_stream(self):

          tests.init_data(1, multiple_stream_versions=True)

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

-         mmd = resolver.get_latest_with_virtual_stream("platform", "f29")

+         with models.make_session(conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="db")

+             mmd = resolver.get_latest_with_virtual_stream("platform", "f29")

          assert mmd

          assert mmd.get_stream_name() == "f29.2.0"

  

      def test_get_latest_with_virtual_stream_none(self):

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

-         mmd = resolver.get_latest_with_virtual_stream("platform", "doesnotexist")

+         with models.make_session(conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="db")

+             mmd = resolver.get_latest_with_virtual_stream("platform", "doesnotexist")

          assert not mmd

  

      def test_get_module_count(self):

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

-         count = resolver.get_module_count(name="platform", stream="f28")

+         with models.make_session(conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="db")

+             count = resolver.get_module_count(name="platform", stream="f28")

          assert count == 1

@@ -24,7 +24,7 @@ 

  from datetime import datetime

  

  import module_build_service.resolver as mbs_resolver

- from module_build_service import db

+ from module_build_service import conf, db, models

  from module_build_service.utils.general import import_mmd, load_mmd_file, mmd_to_str

  from module_build_service.models import ModuleBuild

  import tests
@@ -65,9 +65,10 @@ 

          db.session.add(build)

          db.session.commit()

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="local")

-         result = resolver.get_buildrequired_modulemds(

-             "testmodule", "master", platform_f8.mmd().get_nsvc())

+         with models.make_session(conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="local")

+             result = resolver.get_buildrequired_modulemds(

+                 "testmodule", "master", platform_f8.mmd().get_nsvc())

          nsvcs = set([m.get_nsvc() for m in result])

          assert nsvcs == set(

              ["testmodule:master:20170109091357:9c690d0e", "testmodule:master:20170109091357:123"])

file modified
+81 -68
@@ -53,10 +53,12 @@ 

  

          mock_session().get.return_value = mock_res

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

-         module_mmds = resolver.get_module_modulemds(

-             "testmodule", "master", "20180205135154", "9c690d0e", virtual_streams=["f28"]

-         )

+         with module_build_service.models.make_session(module_build_service.conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="mbs")

+             module_mmds = resolver.get_module_modulemds(

+                 "testmodule", "master", "20180205135154", "9c690d0e",

+                 virtual_streams=["f28"]

+             )

          nsvcs = set(

              m.get_nsvc()

              for m in module_mmds
@@ -109,8 +111,9 @@ 

          }

  

          mock_session().get.return_value = mock_res

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

-         ret = resolver.get_module_modulemds("testmodule", "master", version)

+         with module_build_service.models.make_session(module_build_service.conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="mbs")

+             ret = resolver.get_module_modulemds("testmodule", "master", version)

          nsvcs = set(

              m.get_nsvc()

              for m in ret
@@ -172,9 +175,10 @@ 

  

          mock_session().get.return_value = mock_res

          expected = set(["module-f28-build"])

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

-         result = resolver.get_module_build_dependencies(

-             "testmodule", "master", "20180205135154", "9c690d0e").keys()

+         with module_build_service.models.make_session(module_build_service.conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="mbs")

+             result = resolver.get_module_build_dependencies(

+                 "testmodule", "master", "20180205135154", "9c690d0e").keys()

  

          expected_queries = [

              {
@@ -245,10 +249,11 @@ 

  

          expected = set()

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

-         result = resolver.get_module_build_dependencies(

-             "testmodule", "master", "20180205135154", "9c690d0e"

-         ).keys()

+         with module_build_service.models.make_session(module_build_service.conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="mbs")

+             result = resolver.get_module_build_dependencies(

+                 "testmodule", "master", "20180205135154", "9c690d0e"

+             ).keys()

          mbs_url = tests.conf.mbs_url

          expected_query = {

              "name": "testmodule",
@@ -283,10 +288,10 @@ 

          }

  

          mock_session().get.return_value = mock_res

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

-         result = resolver.resolve_profiles(

-             formatted_testmodule_mmd, ("buildroot", "srpm-buildroot")

-         )

+         with module_build_service.models.make_session(module_build_service.conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="mbs")

+             result = resolver.resolve_profiles(

+                 formatted_testmodule_mmd, ("buildroot", "srpm-buildroot"))

          expected = {

              "buildroot": set([

                  "unzip",
@@ -353,62 +358,68 @@ 

          self, local_builds, conf_system, formatted_testmodule_mmd

      ):

          tests.clean_database()

-         with tests.app.app_context():

-             module_build_service.utils.load_local_builds(["platform"])

+         module_build_service.utils.load_local_builds(["platform"])

  

-             resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+         with module_build_service.models.make_session(module_build_service.conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="mbs")

              result = resolver.resolve_profiles(

                  formatted_testmodule_mmd, ("buildroot", "srpm-buildroot"))

-             expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])}

-             assert result == expected

+         expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])}

+         assert result == expected

  

      def test_get_empty_buildrequired_modulemds(self):

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+         with module_build_service.models.make_session(module_build_service.conf) as db_session:

+             resolver = mbs_resolver.GenericResolver.create(

+                 db_session, tests.conf, backend="mbs")

  

-         with patch.object(resolver, "session") as session:

-             session.get.return_value = Mock(ok=True)

-             session.get.return_value.json.return_value = {"items": [], "meta": {"next": None}}

+             with patch.object(resolver, "session") as session:

+                 session.get.return_value = Mock(ok=True)

+                 session.get.return_value.json.return_value = {"items": [], "meta": {"next": None}}

  

-             result = resolver.get_buildrequired_modulemds("nodejs", "10", "platform:el8:1:00000000")

-             assert [] == result

+                 result = resolver.get_buildrequired_modulemds(

+                     "nodejs", "10", "platform:el8:1:00000000")

+                 assert [] == result

  

      def test_get_buildrequired_modulemds(self):

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

- 

-         with patch.object(resolver, "session") as session:

-             session.get.return_value = Mock(ok=True)

-             session.get.return_value.json.return_value = {

-                 "items": [

-                     {

-                         "name": "nodejs",

-                         "stream": "10",

-                         "version": 1,

-                         "context": "c1",

-                         "modulemd": mmd_to_str(

-                             tests.make_module("nodejs:10:1:c1", store_to_db=False),

-                         ),

-                     },

-                     {

-                         "name": "nodejs",

-                         "stream": "10",

-                         "version": 2,

-                         "context": "c1",

-                         "modulemd": mmd_to_str(

-                             tests.make_module("nodejs:10:2:c1", store_to_db=False),

-                         ),

-                     },

-                 ],

-                 "meta": {"next": None},

-             }

- 

-             result = resolver.get_buildrequired_modulemds("nodejs", "10", "platform:el8:1:00000000")

- 

-             assert 1 == len(result)

-             mmd = result[0]

-             assert "nodejs" == mmd.get_module_name()

-             assert "10" == mmd.get_stream_name()

-             assert 1 == mmd.get_version()

-             assert "c1" == mmd.get_context()

+         with module_build_service.models.make_session(module_build_service.conf) as db_session:

+             resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")

+ 

+             with patch.object(resolver, "session") as session:

+                 session.get.return_value = Mock(ok=True)

+                 session.get.return_value.json.return_value = {

+                     "items": [

+                         {

+                             "name": "nodejs",

+                             "stream": "10",

+                             "version": 1,

+                             "context": "c1",

+                             "modulemd": mmd_to_str(

+                                 tests.make_module("nodejs:10:1:c1", store_to_db=False),

+                             ),

+                         },

+                         {

+                             "name": "nodejs",

+                             "stream": "10",

+                             "version": 2,

+                             "context": "c1",

+                             "modulemd": mmd_to_str(

+                                 tests.make_module("nodejs:10:2:c1", store_to_db=False),

+                             ),

+                         },

+                     ],

+                     "meta": {"next": None},

+                 }

+ 

+                 result = resolver.get_buildrequired_modulemds(

+                     "nodejs", "10", "platform:el8:1:00000000")

+ 

+                 assert 1 == len(result)

+                 mmd = result[0]

+                 assert "nodejs" == mmd.get_module_name()

+                 assert "10" == mmd.get_stream_name()

+                 assert 1 == mmd.get_version()

+                 assert "c1" == mmd.get_context()

  

      @patch("requests.Session")

      def test_get_module_count(self, mock_session):
@@ -420,8 +431,9 @@ 

          }

          mock_session.return_value.get.return_value = mock_res

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

-         count = resolver.get_module_count(name="platform", stream="f28")

+         with module_build_service.models.make_session(module_build_service.conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="mbs")

+             count = resolver.get_module_count(name="platform", stream="f28")

  

          assert count == 5

          mock_session.return_value.get.assert_called_once_with(
@@ -447,8 +459,9 @@ 

          }

          mock_session.return_value.get.return_value = mock_res

  

-         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

-         mmd = resolver.get_latest_with_virtual_stream("platform", "virtualf28")

+         with module_build_service.models.make_session(module_build_service.conf) as session:

+             resolver = mbs_resolver.GenericResolver.create(session, tests.conf, backend="mbs")

+             mmd = resolver.get_latest_with_virtual_stream("platform", "virtualf28")

  

          assert mmd.get_module_name() == "platform"

          mock_session.return_value.get.assert_called_once_with(

@@ -51,7 +51,8 @@ 

  

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

      @patch("module_build_service.models.ModuleBuild.from_module_event")

-     def test_init_basic(self, from_module_event, create_builder):

+     @patch("module_build_service.scheduler.handlers.modules.GenericResolver")

+     def test_init_basic(self, generic_resolver, from_module_event, create_builder):

          builder = mock.Mock()

          builder.get_disttag_srpm.return_value = "some srpm disttag"

          builder.build.return_value = 1234, 1, "", None
@@ -83,8 +84,7 @@ 

  

          msg = module_build_service.messaging.MBSModule(

              msg_id=None, module_build_id=1, module_build_state="some state")

-         with patch.object(module_build_service.resolver, "system_resolver"):

-             self.fn(config=self.config, session=self.session, msg=msg)

+         self.fn(config=self.config, session=self.session, msg=msg)

  

      @patch(

          "module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -92,7 +92,7 @@ 

      )

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

      @patch("module_build_service.resolver.DBResolver")

-     @patch("module_build_service.resolver.GenericResolver")

+     @patch("module_build_service.scheduler.handlers.modules.GenericResolver")

      def test_new_repo_called_when_macros_reused(

          self, generic_resolver, resolver, create_builder, dbg

      ):
@@ -120,12 +120,12 @@ 

              resolver.backend = "db"

              resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

  

-             with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-                 msg = module_build_service.messaging.MBSModule(

-                     msg_id=None, module_build_id=2, module_build_state="some state")

-                 module_build_service.scheduler.handlers.modules.wait(

-                     config=conf, session=db.session, msg=msg)

-                 koji_session.newRepo.assert_called_once_with("module-123-build")

+             generic_resolver.create.return_value = resolver

+             msg = module_build_service.messaging.MBSModule(

+                 msg_id=None, module_build_id=2, module_build_state="some state")

+             module_build_service.scheduler.handlers.modules.wait(

+                 config=conf, session=db.session, msg=msg)

+             koji_session.newRepo.assert_called_once_with("module-123-build")

  

              # When module-build-macros is reused, it still has to appear only

              # once in database.
@@ -142,7 +142,7 @@ 

      )

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

      @patch("module_build_service.resolver.DBResolver")

-     @patch("module_build_service.resolver.GenericResolver")

+     @patch("module_build_service.scheduler.handlers.modules.GenericResolver")

      def test_new_repo_not_called_when_macros_not_reused(

          self, generic_resolver, resolver, create_builder, dbg

      ):
@@ -170,12 +170,12 @@ 

              resolver.backend = "db"

              resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

  

-             with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-                 msg = module_build_service.messaging.MBSModule(

-                     msg_id=None, module_build_id=2, module_build_state="some state")

-                 module_build_service.scheduler.handlers.modules.wait(

-                     config=conf, session=db.session, msg=msg)

-                 assert koji_session.newRepo.called

+             generic_resolver.create.return_value = resolver

+             msg = module_build_service.messaging.MBSModule(

+                 msg_id=None, module_build_id=2, module_build_state="some state")

+             module_build_service.scheduler.handlers.modules.wait(

+                 config=conf, session=db.session, msg=msg)

+             assert koji_session.newRepo.called

  

      @patch(

          "module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -183,7 +183,7 @@ 

      )

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

      @patch("module_build_service.resolver.DBResolver")

-     @patch("module_build_service.resolver.GenericResolver")

+     @patch("module_build_service.scheduler.handlers.modules.GenericResolver")

      def test_set_cg_build_koji_tag_fallback_to_default(

          self, generic_resolver, resolver, create_builder, dbg

      ):
@@ -216,13 +216,13 @@ 

                  "module-bootstrap-tag": [base_mmd]

              }

  

-             with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-                 msg = module_build_service.messaging.MBSModule(

-                     msg_id=None, module_build_id=2, module_build_state="some state")

-                 module_build_service.scheduler.handlers.modules.wait(

-                     config=conf, session=db.session, msg=msg)

-                 module_build = ModuleBuild.query.filter_by(id=2).one()

-                 assert module_build.cg_build_koji_tag == "modular-updates-candidate"

+             generic_resolver.create.return_value = resolver

+             msg = module_build_service.messaging.MBSModule(

+                 msg_id=None, module_build_id=2, module_build_state="some state")

+             module_build_service.scheduler.handlers.modules.wait(

+                 config=conf, session=db.session, msg=msg)

+             module_build = ModuleBuild.query.filter_by(id=2).one()

+             assert module_build.cg_build_koji_tag == "modular-updates-candidate"

  

      @pytest.mark.parametrize(

          "koji_cg_tag_build,expected_cg_koji_build_tag",
@@ -237,7 +237,7 @@ 

      )

      @patch("module_build_service.builder.GenericBuilder.create_from_module")

      @patch("module_build_service.resolver.DBResolver")

-     @patch("module_build_service.resolver.GenericResolver")

+     @patch("module_build_service.scheduler.handlers.modules.GenericResolver")

      @patch(

          "module_build_service.config.Config.base_module_names",

          new_callable=mock.PropertyMock,
@@ -287,12 +287,12 @@ 

                  "koji_cg_tag_build",

                  new=koji_cg_tag_build,

              ):

-                 with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-                     msg = module_build_service.messaging.MBSModule(

-                         msg_id=None, module_build_id=2, module_build_state="some state"

-                     )

-                     module_build_service.scheduler.handlers.modules.wait(

-                         config=conf, session=db.session, msg=msg

-                     )

-                     module_build = ModuleBuild.query.filter_by(id=2).one()

-                     assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag

+                 generic_resolver.create.return_value = resolver

+                 msg = module_build_service.messaging.MBSModule(

+                     msg_id=None, module_build_id=2, module_build_state="some state"

+                 )

+                 module_build_service.scheduler.handlers.modules.wait(

+                     config=conf, session=db.session, msg=msg

+                 )

+                 module_build = ModuleBuild.query.filter_by(id=2).one()

+                 assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag

file modified
+21 -11
@@ -19,7 +19,7 @@ 

  # SOFTWARE.

  from mock import patch, Mock

  

- from module_build_service import conf

+ from module_build_service import conf, models

  from module_build_service.utils import ursine

  from tests import make_module, clean_database

  
@@ -140,7 +140,8 @@ 

              "url": "http://example.com/repos/tag-4-build/latest/$arch/",

          }]

  

-         modulemds = ursine.get_modulemds_from_ursine_content("tag")

+         with models.make_session(conf) as db_session:

+             modulemds = ursine.get_modulemds_from_ursine_content(db_session, "tag")

          assert [] == modulemds

  

      @patch.object(conf, "koji_tag_prefixes", new=["module"])
@@ -179,7 +180,8 @@ 

  

          koji_tag = "tag"  # It's ok to use arbitrary tag name.

          with patch.object(conf, "koji_external_repo_url_prefix", new="http://example.com/"):

-             modulemds = ursine.get_modulemds_from_ursine_content(koji_tag)

+             with models.make_session(conf) as db_session:

+                 modulemds = ursine.get_modulemds_from_ursine_content(db_session, koji_tag)

  

          test_nsvcs = [item.get_nsvc() for item in modulemds]

          test_nsvcs.sort()
@@ -204,7 +206,8 @@ 

          original_xmd = fake_mmd.get_xmd()

  

          with patch.object(ursine, "log") as log:

-             ursine.handle_stream_collision_modules(fake_mmd)

+             with models.make_session(conf) as session:

+                 ursine.handle_stream_collision_modules(session, fake_mmd)

              assert 2 == log.info.call_count

              find_stream_collision_modules.assert_not_called()

  
@@ -229,8 +232,9 @@ 

          get_modulemds_from_ursine_content.return_value = []

  

          with patch.object(ursine, "log") as log:

-             ursine.handle_stream_collision_modules(fake_mmd)

-             assert 2 == log.info.call_count

+             with models.make_session(conf) as session:

+                 ursine.handle_stream_collision_modules(session, fake_mmd)

+                 assert 2 == log.info.call_count

  

          # Ensure stream_collision_modules is set.

          expected_xmd["mbs"]["buildrequires"]["platform"]["stream_collision_modules"] = ""
@@ -260,7 +264,7 @@ 

          }

          fake_mmd = make_module("name1:s:2020:c", xmd=xmd, store_to_db=False)

  

-         def mock_get_ursine_modulemds(koji_tag):

+         def mock_get_ursine_modulemds(session, koji_tag):

              if koji_tag == "module-rhel-8.0-build":

                  return [

                      # This is the one
@@ -309,7 +313,8 @@ 

          koji_session = ClientSession.return_value

          koji_session.listTaggedRPMS.side_effect = mock_listTaggedRPMS

  

-         ursine.handle_stream_collision_modules(fake_mmd)

+         with models.make_session(conf) as session:

+             ursine.handle_stream_collision_modules(session, fake_mmd)

  

          xmd = fake_mmd.get_xmd()

          buildrequires = xmd["mbs"]["buildrequires"]
@@ -332,7 +337,8 @@ 

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

      def test_no_modulemds_found_from_ursine_content(self, get_modulemds_from_ursine_content):

          get_modulemds_from_ursine_content.return_value = []

-         assert not ursine.find_stream_collision_modules({}, "koji_tag")

+         with models.make_session(conf) as session:

+             assert not ursine.find_stream_collision_modules(session, {}, "koji_tag")

  

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

      def test_no_collisions_found(self, get_modulemds_from_ursine_content):
@@ -342,7 +348,9 @@ 

              make_module("modules:2:1:c2", store_to_db=False),

              make_module("modulet:3:1:c3", store_to_db=False),

          ]

-         assert [] == ursine.find_stream_collision_modules(xmd_mbs_buildrequires, "koji_tag")

+         with models.make_session(conf) as session:

+             assert [] == ursine.find_stream_collision_modules(

+                 session, xmd_mbs_buildrequires, "koji_tag")

  

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

      def test_collision_modules_are_found(self, get_modulemds_from_ursine_content):
@@ -354,5 +362,7 @@ 

          ]

          get_modulemds_from_ursine_content.return_value = fake_modules

  

-         modules = ursine.find_stream_collision_modules(xmd_mbs_buildrequires, "koji_tag")

+         with models.make_session(conf) as session:

+             modules = ursine.find_stream_collision_modules(

+                 session, xmd_mbs_buildrequires, "koji_tag")

          assert [fake_modules[1].get_nsvc()] == modules

@@ -293,11 +293,12 @@ 

  

          with open(modulemd_file_path, "rb") as fd:

              handle = FileStorage(fd)

-             module_build_service.utils.submit_module_build_from_yaml(

-                 username, handle, {}, stream=stream, skiptests=True)

+             with models.make_session(conf) as session:

+                 module_build_service.utils.submit_module_build_from_yaml(

+                     session, username, handle, {}, stream=stream, skiptests=True)

              mock_submit_args = mock_submit.call_args[0]

-             username_arg = mock_submit_args[0]

-             mmd_arg = mock_submit_args[1]

+             username_arg = mock_submit_args[1]

+             mmd_arg = mock_submit_args[2]

              assert mmd_arg.get_stream_name() == stream

              assert "\n\n%__spec_check_pre exit 0\n" in mmd_arg.get_buildopts().get_rpm_macros()

              assert username_arg == username
@@ -931,7 +932,8 @@ 

          # Create a copy of mmd1 without xmd.mbs, since that will cause validate_mmd to fail

          mmd1_copy = mmd1.copy()

          mmd1_copy.set_xmd({})

-         builds = module_build_service.utils.submit_module_build("foo", mmd1_copy, {})

+         with models.make_session(conf) as session:

+             builds = module_build_service.utils.submit_module_build(session, "foo", mmd1_copy, {})

          ret = {b.mmd().get_context(): b.state for b in builds}

          assert ret == {"c1": models.BUILD_STATES["ready"], "c2": models.BUILD_STATES["init"]}

  
@@ -950,7 +952,8 @@ 

      TAGGED_COMPONENTS = []

  

      @module_build_service.utils.validate_koji_tag("tag_name")

-     def __init__(self, owner, module, config, tag_name, components):

+     def __init__(self, db_session, owner, module, config, tag_name, components):

+         self.db_session = db_session

          self.module_str = module

          self.tag_name = tag_name

          self.config = config
@@ -993,9 +996,9 @@ 

          return DummyModuleBuilder._build_id, state, reason, None

  

      @staticmethod

-     def get_disttag_srpm(disttag, module_build):

+     def get_disttag_srpm(session, disttag, module_build):

          # @FIXME

-         return KojiModuleBuilder.get_disttag_srpm(disttag, module_build)

+         return KojiModuleBuilder.get_disttag_srpm(session, disttag, module_build)

  

      def cancel_build(self, task_id):

          pass

@@ -24,7 +24,7 @@ 

  import pytest

  

  import module_build_service.utils

- from module_build_service import Modulemd

+ from module_build_service import conf, models, Modulemd

  from module_build_service.errors import StreamAmbigous

  from tests import db, clean_database, make_module, init_data, base_dir

  
@@ -44,7 +44,9 @@ 

          """

          mmd = module_build.mmd()

          module_build_service.utils.expand_mse_streams(db.session, mmd)

-         modules = module_build_service.utils.get_mmds_required_by_module_recursively(mmd)

+         with models.make_session(conf) as session:

+             modules = module_build_service.utils.get_mmds_required_by_module_recursively(

+                 session, mmd)

          nsvcs = [

              m.get_nsvc()

              for m in modules
@@ -413,7 +415,8 @@ 

          mmd.remove_dependencies(deps)

          mmd.add_dependencies(new_deps)

  

-         mmds = module_build_service.utils.mse._get_base_module_mmds(mmd)

+         with models.make_session(conf) as session:

+             mmds = module_build_service.utils.mse._get_base_module_mmds(session, mmd)

          expected = set(["platform:f29.0.0", "platform:f29.1.0", "platform:f29.2.0"])

          # Verify no duplicates were returned before doing set operations

          assert len(mmds) == len(expected)
@@ -439,7 +442,8 @@ 

  

          make_module("platform:lp29.1.1:12:c11", {}, {}, virtual_streams=virtual_streams)

  

-         mmds = module_build_service.utils.mse._get_base_module_mmds(mmd)

+         with models.make_session(conf) as session:

+             mmds = module_build_service.utils.mse._get_base_module_mmds(session, mmd)

          if virtual_streams == ["f29"]:

              expected = set(

                  ["platform:f29.0.0", "platform:f29.1.0", "platform:f29.2.0", "platform:lp29.1.1"])
@@ -470,7 +474,8 @@ 

          mmd.remove_dependencies(deps)

          mmd.add_dependencies(new_deps)

  

-         mmds = module_build_service.utils.mse._get_base_module_mmds(mmd)

+         with models.make_session(conf) as session:

+             mmds = module_build_service.utils.mse._get_base_module_mmds(session, mmd)

          expected = set(["platform:foo28", "platform:foo29", "platform:foo30"])

  

          # Verify no duplicates were returned before doing set operations

Signed-off-by: Chenxiong Qi cqi@redhat.com

@mprahl Please take a look first. This is for FACTORY-4509. After I fix test issues for running with PostgreSQL, I'll test these changes to ensure they work with PostgreSQL. I'll also add more detailed information to the commit message about these changes.

Build 6468e43 FAILED!
Rebase or make new commits to rebuild.

@cqi I took a quick glance and it looked good. My preference would be for us to standardize on a parameter name for the database session (db_session vs session).

@jkaluza do you have any opinions here? Is there any danger in keeping some of these database sessions open longer? I somehow remember you running into an issue with that in ODCS. If there are no issues, it should at least reduce the overhead of creating database sessions over and over again.

@cqi could you also take a look at the back-end code under module_build_service/scheduler so that it doesn't contain any queries like models.ComponentBuild.query, since that is Flask-SQLAlchemy querying syntax. It should be db_session.query(models.ComponentBuild) instead.

Once that is addressed, it may not be necessary for make_session to establish a Flask context.

Pull-Request has been closed by cqi

4 years ago
Changes Summary 30
+76 -73
file changed
module_build_service/builder/KojiModuleBuilder.py
+20 -20
file changed
module_build_service/builder/MockModuleBuilder.py
+10 -10
file changed
module_build_service/builder/base.py
+76 -62
file changed
module_build_service/manage.py
+18 -31
file changed
module_build_service/models.py
+227 -224
file changed
module_build_service/resolver/DBResolver.py
+1 -0
file changed
module_build_service/resolver/LocalResolver.py
+19 -7
file changed
module_build_service/resolver/MBSResolver.py
+0 -5
file changed
module_build_service/resolver/__init__.py
+2 -2
file changed
module_build_service/resolver/base.py
+2 -0
file changed
module_build_service/scheduler/__init__.py
+10 -6
file changed
module_build_service/scheduler/handlers/modules.py
+1 -0
file changed
module_build_service/scheduler/handlers/repos.py
+1 -0
file changed
module_build_service/utils/general.py
+22 -23
file changed
module_build_service/utils/mse.py
+30 -27
file changed
module_build_service/utils/submit.py
+13 -10
file changed
module_build_service/utils/ursine.py
+3 -2
file changed
module_build_service/views.py
+44 -23
file changed
tests/test_build/test_build.py
+19 -17
file changed
tests/test_builder/test_base.py
+101 -76
file changed
tests/test_builder/test_koji.py
+5 -4
file changed
tests/test_builder/test_mock.py
+10 -3
file changed
tests/test_manage.py
+47 -38
file changed
tests/test_resolver/test_db.py
+5 -4
file changed
tests/test_resolver/test_local.py
+81 -68
file changed
tests/test_resolver/test_mbs.py
+35 -35
file changed
tests/test_scheduler/test_module_wait.py
+21 -11
file changed
tests/test_utils/test_ursine.py
+11 -8
file changed
tests/test_utils/test_utils.py
+10 -5
file changed
tests/test_utils/test_utils_mse.py