#1661 MBSResolver - caching, tests, fixes
Merged 2 years ago by breilly. Opened 3 years ago by otaylor.
otaylor/fm-orchestrator mbs-resolver-performance into master

@@ -82,6 +82,9 @@ 

      RPMS_ALLOW_REPOSITORY = True

      MODULES_ALLOW_REPOSITORY = True

  

+     # Match the Fedora server-side configuration

+     ALLOW_ONLY_COMPATIBLE_BASE_MODULES = False

+ 

      # Celery tasks will be executed locally for local builds

      CELERY_TASK_ALWAYS_EAGER = True

  
@@ -440,8 +443,12 @@ 

              "type": bool,

              "default": True,

              "desc": "When True, only modules built on top of compatible base modules are "

-                     "considered by MBS as possible buildrequirement. When False, modules "

-                     "built against any base module stream can be used as a buildrequire.",

+                     "considered by MBS as possible buildrequirements or as reuse candidates. "

+                     "When False, modules built against any base module stream sharing a "

+                     "virtual stream can be used as a buildrequire, but only modules built "

+                     "against the exact same base module stream are considered as candidates "

+                     "for reuse. Setting this to True requires base module streams to be of the "

+                     "form: <prefix><x>.<y>.<z>.",

          },

          "base_module_stream_exclusions": {

              "type": list,

@@ -21,6 +21,10 @@ 

      pass

  

  

+ class StreamNotXyz(UnprocessableEntity):

+     pass

+ 

+ 

  class Conflict(ValueError):

      pass

  

@@ -1,6 +1,7 @@ 

  # -*- coding: utf-8 -*-

  # SPDX-License-Identifier: MIT

  from __future__ import absolute_import, print_function

+ from collections import defaultdict

  from functools import wraps

  import getpass

  import logging
@@ -16,7 +17,7 @@ 

      import_builds_from_local_dnf_repos, load_local_builds

  )

  from module_build_service.common import conf, models

- from module_build_service.common.errors import StreamAmbigous

+ from module_build_service.common.errors import StreamAmbigous, StreamNotXyz

  from module_build_service.common.logger import level_flags

  from module_build_service.common.utils import load_mmd_file, import_mmd

  import module_build_service.scheduler.consumer
@@ -103,11 +104,33 @@ 

      import_mmd(db.session, mmd)

  

  

+ def collect_dep_overrides(overrides):

+     collected = defaultdict(list)

+     for value in overrides:

+         parts = value.split(":")

+         if len(parts) != 2:

+             raise ValueError("dependency overrides must be in the form name:stream")

+         name, stream = parts

+         collected[name].append(stream)

+ 

+     return collected

+ 

+ 

  @manager.option("--stream", action="store", dest="stream")

  @manager.option("--file", action="store", dest="yaml_file")

  @manager.option("--srpm", action="append", default=[], dest="srpms", metavar="SRPM")

  @manager.option("--skiptests", action="store_true", dest="skiptests")

  @manager.option("--offline", action="store_true", dest="offline")

+ @manager.option(

+     '--buildrequires', action='append', metavar='name:stream',

+     dest='buildrequires', default=[],

+     help='Buildrequires to override in the form of "name:stream"'

+ )

+ @manager.option(

+     '--requires', action='append', metavar='name:stream',

+     dest='requires', default=[],

+     help='Requires to override in the form of "name:stream"'

+ )

  @manager.option("-d", "--debug", action="store_true", dest="log_debug")

  @manager.option("-l", "--add-local-build", action="append", default=None, dest="local_build_nsvs")

  @manager.option("-s", "--set-stream", action="append", default=[], dest="default_streams")
@@ -125,6 +148,8 @@ 

      offline=False,

      platform_repofiles=None,

      platform_id=None,

+     requires=None,

+     buildrequires=None,

      log_debug=False,

  ):

      """ Performs local module build using Mock
@@ -163,7 +188,9 @@ 

  

      params = {

          "local_build": True,

-         "default_streams": dict(ns.split(":") for ns in default_streams)

+         "default_streams": dict(ns.split(":") for ns in default_streams),

+         "require_overrides": collect_dep_overrides(requires),

+         "buildrequire_overrides": collect_dep_overrides(buildrequires),

      }

      if srpms:

          params["srpms"] = srpms
@@ -190,7 +217,11 @@ 

          except StreamAmbigous as e:

              logging.error(str(e))

              logging.error("Use '-s module_name:module_stream' to choose the stream")

-             return

+             return 1

+         except StreamNotXyz as e:

+             logging.error(str(e))

+             logging.error("Use '--buildrequires name:stream' to override the base module stream")

+             return 1

  

          module_build_ids = [build.id for build in module_builds]

  

@@ -6,7 +6,7 @@ 

  from sqlalchemy.orm import aliased

  

  from module_build_service.common import log, models

- from module_build_service.common.errors import UnprocessableEntity

+ from module_build_service.common.errors import StreamNotXyz, UnprocessableEntity

  from module_build_service.common.utils import load_mmd

  from module_build_service.resolver.base import GenericResolver

  
@@ -23,11 +23,10 @@ 

          self.config = config

  

      def get_module(

-         self, name, stream, version, context,

-         state=models.BUILD_STATES["ready"], strict=False

+         self, name, stream, version, context, strict=False

      ):

          mb = models.ModuleBuild.get_build_from_nsvc(

-             self.db_session, name, stream, version, context, state=state)

+             self.db_session, name, stream, version, context, state=models.BUILD_STATES["ready"])

          if mb:

              return mb.extended_json(self.db_session)

  
@@ -111,26 +110,31 @@ 

          :param stream_version_lte: If True, the compatible streams are limited

               by the stream version computed from `stream`. If False, even the

               modules with higher stream version are returned.

-         :param virtual_streams: List of virtual streams. If set, also modules

-             with incompatible stream version are returned in case they share

-             one of the virtual streams.

+         :param virtual_streams: List of virtual streams.

+             Modules are returned if they share one of the virtual streams.

          :param states: List of states the returned compatible modules should

              be in.

          :return list: List of Modulemd objects.

          """

+         if not virtual_streams:

+             raise RuntimeError("Virtual stream list must not be empty")

+ 

          name = base_module_mmd.get_module_name()

          stream = base_module_mmd.get_stream_name()

          builds = []

+ 

          stream_version = None

          if stream_version_lte:

              stream_in_xyz_format = len(str(models.ModuleBuild.get_stream_version(

                  stream, right_pad=False))) >= 5

-             if stream_in_xyz_format:

-                 stream_version = models.ModuleBuild.get_stream_version(stream)

-             else:

-                 log.warning(

-                     "Cannot get compatible base modules, because stream_version_lte is used, "

-                     "but stream %r is not in x.y.z format." % stream)

+             if not stream_in_xyz_format:

+                 raise StreamNotXyz(

+                     "Cannot get compatible base modules, because stream of resolved "

+                     "base module %s:%s is not in x.y.z format." % (

+                         base_module_mmd.get_module_name(), stream

+                     ))

+             stream_version = models.ModuleBuild.get_stream_version(stream)

+ 

          builds = models.ModuleBuild.get_last_builds_in_stream_version_lte(

              self.db_session, name, stream_version, virtual_streams, states)
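
As a side note, the x.y.z test above keys off the length of the stream version computed without right-padding. A simplified, standalone illustration of the idea (this is not the real models.ModuleBuild.get_stream_version; MBS's prefix handling may differ):

    import re

    def looks_like_xyz(stream):
        # "f29.1.0" -> 290100 (6 digits, passes); "f28" -> fails, and the
        # resolver now raises StreamNotXyz instead of logging a warning.
        digits = re.sub(r"^\D+", "", stream)
        parts = digits.split(".") if digits else []
        if len(parts) != 3 or not all(p.isdigit() for p in parts):
            return False
        version = int(parts[0]) * 10000 + int(parts[1]) * 100 + int(parts[2])
        return len(str(version)) >= 5

    assert looks_like_xyz("f29.1.0")
    assert not looks_like_xyz("f28")
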

  

@@ -5,11 +5,11 @@ 

  from __future__ import absolute_import

  import logging

  

- import kobo.rpmlib

+ import dogpile.cache

  

  from module_build_service.common import models

  from module_build_service.common.config import conf

- from module_build_service.common.errors import UnprocessableEntity

+ from module_build_service.common.errors import StreamNotXyz, UnprocessableEntity

  from module_build_service.common.request_utils import requests_session

  from module_build_service.common.utils import load_mmd, import_mmd

  from module_build_service.resolver.KojiResolver import KojiResolver
@@ -17,14 +17,31 @@ 

  log = logging.getLogger()

  

  

+ def _canonicalize_state(state):

+     if isinstance(state, int):

+         return models.INVERSE_BUILD_STATES[state]

+     else:

+         return state

+ 

+ 

+ def _canonicalize_states(states):

+     if states:

+         result = sorted({_canonicalize_state(s) for s in states})

+         if result == ["ready"]:

+             return None

+         else:

+             return result

+ 

+ 

  class MBSResolver(KojiResolver):

  

      backend = "mbs"

  

+     region = dogpile.cache.make_region().configure("dogpile.cache.memory")

+ 

      def __init__(self, db_session, config):

          self.db_session = db_session

          self.mbs_prod_url = config.mbs_url

-         self._generic_error = "Failed to query MBS with query %r returned HTTP status %s"

  

      def _query_from_nsvc(self, name, stream, version=None, context=None, states=None):

          """
@@ -38,44 +55,65 @@ 

          states = states or ["ready"]

          query = {

              "name": name,

-             "stream": stream,

              "state": states,

              "verbose": True,

              "order_desc_by": "version",

          }

+         if stream is not None:

+             query["stream"] = stream

          if version is not None:

              query["version"] = str(version)

          if context is not None:

              query["context"] = context

          return query

  

-     def _get_modules(

-         self, name, stream, version=None, context=None, states=None, strict=False, **kwargs

+     @region.cache_on_arguments()

+     def _get_module_by_nsvc(

+         self, name, stream, version, context

      ):

-         """Query and return modules from MBS with specific info

+         """

+         Query a single module from MBS

  

-         :param str name: module's name.

-         :param str stream: module's stream.

-         :kwarg str version: a string or int of the module's version. When None,

-             latest version will be returned.

-         :kwarg str context: module's context. Optional.

-         :kwarg str state: module's state. Defaults to ``ready``.

-         :kwarg bool strict: Normally this function returns None if no module can be

-             found. If strict=True, then an UnprocessableEntity is raised.

-         :return: final list of module_info which pass repoclosure

-         :rtype: list[dict]

-         :raises UnprocessableEntity: if no modules are found and ``strict`` is True.

+         :param str name: Name of the module to query.

+         :param str stream: Stream of the module to query.

+         :param version: Version of the module to query (str or int).

+         :param str context: Context of the module to query.

          """

+         query = self._query_from_nsvc(name, stream, version, context)

+         query['per_page'] = 5

+         query['page'] = 1

+ 

+         res = requests_session.get(self.mbs_prod_url, params=query)

+         res.raise_for_status()

+ 

+         data = res.json()

+         modules = data["items"]

+         if modules:

+             return modules[0]

+         else:

+             return None

+ 

+     # Args must exactly match call in _get_modules; newer versions of dogpile.cache

+     # have dogpile.cache.util.kwarg_function_key_generator which could help

+     @region.cache_on_arguments()

+     def _get_modules_with_cache(

+         self, name, stream, version, context,

+             states, base_module_br, virtual_stream, stream_version_lte

+     ):

          query = self._query_from_nsvc(name, stream, version, context, states)

          query["page"] = 1

-         query["per_page"] = 10

-         query.update(kwargs)

+         query["per_page"] = 5

+         if virtual_stream is not None:

+             query["virtual_stream"] = virtual_stream

+         if stream_version_lte is not None:

+             query["stream_version_lte"] = stream_version_lte

+         if base_module_br is not None:

+             query["base_module_br"] = base_module_br

          modules = []

  

          while True:

              res = requests_session.get(self.mbs_prod_url, params=query)

-             if not res.ok:

-                 raise RuntimeError(self._generic_error % (query, res.status_code))

+             res.raise_for_status()

  

              data = res.json()

              modules_per_page = data["items"]
@@ -84,23 +122,70 @@ 

              if not data["meta"]["next"]:

                  break

  

-             query["page"] += 1

+             if version is None and stream_version_lte is None:

+                 # Stop querying when we've gotten a different version

+                 if modules_per_page[-1]["version"] != modules[0]["version"]:

+                     break

  

-         # Error handling

-         if not modules:

-             if strict:

-                 raise UnprocessableEntity("Failed to find module in MBS %r" % query)

-             else:

-                 return modules

+             query["page"] += 1

  

-         if version is None and "stream_version_lte" not in kwargs:

+         if version is None and stream_version_lte is None:

              # Only return the latest version

-             return [m for m in modules if m["version"] == modules[0]["version"]]

+             results = [m for m in modules if m["version"] == modules[0]["version"]]

+         else:

+             results = modules

+ 

+         for m in results:

+             # We often come back and query details again for the module builds we return

+             # using the retrieved contexts, so prime the cache for single-module queries

+             self._get_module_by_nsvc.set(

+                 m, self, m["name"], m["stream"], m["version"], m["context"]

+             )

+ 

+         return results

+ 

+     def _get_modules(

+         self, name, stream, version=None, context=None, states=None,

+             base_module_br=None, virtual_stream=None, stream_version_lte=None,

+             strict=False,

+     ):

+         """Query and return modules from MBS with specific info

+ 

+         :param str name: module's name.

+         :param str stream: module's stream.

+         :kwarg str version: a string or int of the module's version. When None,

+             latest version will be returned.

+         :kwarg str context: module's context. Optional.

+         :kwarg str states: states for modules. Defaults to ``["ready"]``.

+         :kwarg str base_module_br: if set, restrict results to modules requiring

+             this base_module nsvc

+         :kwarg list virtual_stream: if set, limit to modules for the given virtual_stream

+         :kwarg float stream_version_lte: If set, stream must be less than this version

+         :kwarg bool strict: Normally this function returns None if no module can be

+             found. If strict=True, then an UnprocessableEntity is raised.

+         :return: final list of module_info which pass repoclosure

+         :rtype: list[dict]

+         """

+         modules = self._get_modules_with_cache(

+             name, stream, version, context,

+             states, base_module_br, virtual_stream, stream_version_lte

+         )

+         if not modules and strict:

+             raise UnprocessableEntity("Failed to find module in MBS %s:%s:%s:%s" %

+                                       (name, stream, version, context))

          else:

              return modules

  

-     def get_module(self, name, stream, version, context, states=None, strict=False):

-         rv = self._get_modules(name, stream, version, context, states, strict)

+     def get_module(self, name, stream, version, context, strict=False):

+         if version and context:

+             rv = self._get_module_by_nsvc(name, stream, version, context)

+             if strict and rv is None:

+                 raise UnprocessableEntity("Failed to find module in MBS")

+ 

+             return rv

+ 

+         rv = self._get_modules(name, stream, version, context, strict=strict)

          if rv:

              return rv[0]
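
A short usage note on how the two cached paths combine (the resolver construction mirrors the tests later in this diff; the NSVC is the staged test data's):

    resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="mbs")

    # Full NSVC: answered by the single-module cache, which the paged query
    # primes via _get_module_by_nsvc.set(), so looking up a build that
    # _get_modules() already returned costs no extra HTTP round trip.
    resolver.get_module("testmodule", "master", "20180205135154", "9c690d0e")

    # Without version and context it falls back to the paged query and
    # returns the first (highest-version) match, or None if nothing matches.
    resolver.get_module("testmodule", "master", None, None)
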

  
@@ -114,8 +199,7 @@ 

          query = {"page": 1, "per_page": 1, "short": True}

          query.update(kwargs)

          res = requests_session.get(self.mbs_prod_url, params=query)

-         if not res.ok:

-             raise RuntimeError(self._generic_error % (query, res.status_code))

+         res.raise_for_status()

  

          data = res.json()

          return data["meta"]["total"]
@@ -138,23 +222,37 @@ 

              "virtual_stream": virtual_stream,

          }

          res = requests_session.get(self.mbs_prod_url, params=query)

-         if not res.ok:

-             raise RuntimeError(self._generic_error % (query, res.status_code))

+         res.raise_for_status()

  

          data = res.json()

          if data["items"]:

              return load_mmd(data["items"][0]["modulemd"])

  

+     def _modules_to_modulemds(self, modules, strict):

+         if not modules:

+             return []

+ 

+         mmds = []

+         for module in modules:

+             yaml = module["modulemd"]

+             if not yaml:

+                 if strict:

+                     raise UnprocessableEntity(

+                         "Failed to find modulemd entry in MBS for %r" % module)

+                 else:

+                     log.warning("Failed to find modulemd entry in MBS for %r", module)

+                     continue

+ 

+             mmds.append(load_mmd(yaml))

+         return mmds

+ 

      def get_module_modulemds(

          self,

          name,

          stream,

          version=None,

          context=None,

-         strict=False,

-         stream_version_lte=False,

-         virtual_streams=None,

-         states=None,

+         strict=False

      ):

          """

          Gets the module modulemds from the resolver.
@@ -166,48 +264,15 @@ 

              be returned.

          :kwarg strict: Normally this function returns [] if no module can be

              found.  If strict=True, then a UnprocessableEntity is raised.

-         :kwarg stream_version_lte: If True and if the `stream` can be transformed to

-             "stream version", the returned list will include all the modules with stream version

-             less than or equal the stream version computed from `stream`.

-         :kwarg virtual_streams: a list of the virtual streams to filter on. The filtering uses "or"

-             logic. When falsy, no filtering occurs.

          :return: List of Modulemd metadata instances matching the query

          """

-         yaml = None

- 

          local_modules = models.ModuleBuild.local_modules(self.db_session, name, stream)

          if local_modules:

              return [m.mmd() for m in local_modules]

  

-         extra_args = {}

-         if stream_version_lte and (

-             len(str(models.ModuleBuild.get_stream_version(stream, right_pad=False))) >= 5

-         ):

-             stream_version = models.ModuleBuild.get_stream_version(stream)

-             extra_args["stream_version_lte"] = stream_version

- 

-         if virtual_streams:

-             extra_args["virtual_stream"] = virtual_streams

- 

-         modules = self._get_modules(name, stream, version, context, strict=strict, states=states,

-                                     **extra_args)

-         if not modules:

-             return []

- 

-         mmds = []

-         for module in modules:

-             if module:

-                 yaml = module["modulemd"]

- 

-             if not yaml:

-                 if strict:

-                     raise UnprocessableEntity(

-                         "Failed to find modulemd entry in MBS for %r" % module)

-                 else:

-                     return None

+         modules = self._get_modules(name, stream, version, context, strict=strict)

  

-             mmds.append(load_mmd(yaml))

-         return mmds

+         return self._modules_to_modulemds(modules, strict)

  

      def get_compatible_base_module_modulemds(

          self, base_module_mmd, stream_version_lte, virtual_streams, states
@@ -225,18 +290,38 @@ 

          :param stream_version_lte: If True, the compatible streams are limited

               by the stream version computed from `stream`. If False, even the

               modules with higher stream version are returned.

-         :param virtual_streams: List of virtual streams. If set, also modules

-             with incompatible stream version are returned in case they share

-             one of the virtual streams.

+         :param virtual_streams: List of virtual streams.

+             Modules are returned if they share one of the virtual streams.

          :param states: List of states the returned compatible modules should

              be in.

          :return list: List of Modulemd objects.

          """

+         if not virtual_streams:

+             raise RuntimeError("Virtual stream list must not be empty")

+ 

          name = base_module_mmd.get_module_name()

-         stream = base_module_mmd.get_stream_name()

-         return self.get_module_modulemds(

-             name, stream, stream_version_lte=stream_version_lte, virtual_streams=virtual_streams,

-             states=states)

+ 

+         extra_args = {}

+         if stream_version_lte:

+             stream = base_module_mmd.get_stream_name()

+             stream_in_xyz_format = len(str(models.ModuleBuild.get_stream_version(

+                 stream, right_pad=False))) >= 5

+ 

+             if not stream_in_xyz_format:

+                 raise StreamNotXyz(

+                     "Cannot get compatible base modules, because stream of resolved "

+                     "base module %s:%s is not in x.y.z format." % (

+                         base_module_mmd.get_module_name(), stream

+                     ))

+ 

+             stream_version = models.ModuleBuild.get_stream_version(stream)

+             extra_args["stream_version_lte"] = stream_version

+ 

+         extra_args["virtual_stream"] = virtual_streams

+         extra_args["states"] = _canonicalize_states(states)

+ 

+         modules = self._get_modules(name, None, **extra_args)

+         return self._modules_to_modulemds(modules, False)

  

      def get_buildrequired_modulemds(self, name, stream, base_module_mmd):

          """
@@ -269,7 +354,7 @@ 

              return ret

          else:

              modules = self._get_modules(

-                 name, stream, strict=False, base_module_br=base_module_mmd.get_nsvc())

+                 name, stream, base_module_br=base_module_mmd.get_nsvc())

              return [load_mmd(module["modulemd"]) for module in modules]

  

      def resolve_profiles(self, mmd, keys):
@@ -304,7 +389,7 @@ 

                  continue

  

              # Find the dep in the built modules in MBS

-             modules = self._get_modules(

+             module = self.get_module(

                  module_name,

                  module_info["stream"],

                  module_info["version"],
@@ -312,14 +397,13 @@ 

                  strict=True,

              )

  

-             for module in modules:

-                 yaml = module["modulemd"]

-                 dep_mmd = load_mmd(yaml)

-                 # Take note of what rpms are in this dep's profile.

-                 for key in keys:

-                     profile = dep_mmd.get_profile(key)

-                     if profile:

-                         results[key] |= set(profile.get_rpms())

+             yaml = module["modulemd"]

+             dep_mmd = load_mmd(yaml)

+             # Take note of what rpms are in this dep's profile.

+             for key in keys:

+                 profile = dep_mmd.get_profile(key)

+                 if profile:

+                     results[key] |= set(profile.get_rpms())

  

          # Return the union of all rpms in all profiles of the given keys.

          return results
@@ -365,12 +449,15 @@ 

          else:

              queried_module = self.get_module(name, stream, version, context, strict=strict)

              yaml = queried_module["modulemd"]

-             queried_mmd = load_mmd(yaml)

+             if yaml:

+                 queried_mmd = load_mmd(yaml)

+             else:

+                 queried_mmd = None

  

          if not queried_mmd or "buildrequires" not in queried_mmd.get_xmd().get("mbs", {}):

              raise RuntimeError(

                  'The module "{0!r}" did not contain its modulemd or did not have '

-                 "its xmd attribute filled out in MBS".format(queried_mmd)

+                 "its xmd attribute filled out in MBS".format(queried_module)

              )

  

          buildrequires = queried_mmd.get_xmd()["mbs"]["buildrequires"]
@@ -380,26 +467,23 @@ 

                  self.db_session, name, details["stream"])

              if local_modules:

                  for m in local_modules:

-                     # If the buildrequire is a meta-data only module with no Koji tag set, then just

-                     # skip it

-                     if m.koji_tag is None:

-                         continue

                      module_tags[m.koji_tag] = m.mmd()

                  continue

  

              if "context" not in details:

                  details["context"] = models.DEFAULT_MODULE_CONTEXT

-             modules = self._get_modules(

+ 

+             m = self.get_module(

                  name, details["stream"], details["version"], details["context"], strict=True)

-             for m in modules:

-                 if m["koji_tag"] in module_tags:

-                     continue

-                 # If the buildrequire is a meta-data only module with no Koji tag set, then just

-                 # skip it

-                 if m["koji_tag"] is None:

-                     continue

-                 module_tags.setdefault(m["koji_tag"], [])

-                 module_tags[m["koji_tag"]].append(load_mmd(m["modulemd"]))

+ 

+             if m["koji_tag"] in module_tags:

+                 continue

+             # If the buildrequire is a meta-data only module with no Koji tag set, then just

+             # skip it

+             if m["koji_tag"] is None:

+                 continue

+             module_tags.setdefault(m["koji_tag"], [])

+             module_tags[m["koji_tag"]].append(load_mmd(m["modulemd"]))

  

          return module_tags

  
@@ -448,7 +532,6 @@ 

  

              commit_hash = None

              version = None

-             filtered_rpms = []

              module = self.get_module(

                  module_name, module_stream, module_version, module_context, strict=True

              )
@@ -457,20 +540,6 @@ 

                  if mmd.get_xmd().get("mbs", {}).get("commit"):

                      commit_hash = mmd.get_xmd()["mbs"]["commit"]

  

-                 # Find out the particular NVR of filtered packages

-                 if "rpms" in module and mmd.get_rpm_filters():

-                     for rpm in module["rpms"]:

-                         nvr = kobo.rpmlib.parse_nvra(rpm)

-                         # If the package is not filtered, continue

-                         if not nvr["name"] in mmd.get_rpm_filters():

-                             continue

- 

-                         # If the nvr is already in filtered_rpms, continue

-                         nvr = kobo.rpmlib.make_nvr(nvr, force_epoch=True)

-                         if nvr in filtered_rpms:

-                             continue

-                         filtered_rpms.append(nvr)

- 

              if module.get("version"):

                  version = module["version"]

  
@@ -481,11 +550,10 @@ 

                      "version": str(version),

                      "context": module["context"],

                      "koji_tag": module["koji_tag"],

-                     "filtered_rpms": filtered_rpms,

                  }

              else:

                  raise RuntimeError(

-                     'The module "{0}" didn\'t contain either a commit hash or a'

+                     'The module "{0}" didn\'t contain both a commit hash and a'

                      " version in MBS".format(module_name)

                  )

              # If the module is a base module, then import it in the database so that entries in

@@ -78,7 +78,7 @@ 

          return False

  

      @abstractmethod

-     def get_module(self, name, stream, version, context, state="ready", strict=False):

+     def get_module(self, name, stream, version, context, strict=False):

          raise NotImplementedError()

  

      @abstractmethod

@@ -83,7 +83,9 @@ 

      #

      # 1) The `conf.allow_only_compatible_base_modules` is False. This means that MBS should

      #    not try to find any compatible base modules in its DB and simply use the buildrequired

-     #    base module as it is.

+     #    base module as it is. (*NOTE* This is different than the meaning of

+     #    allow_only_compatible_base_modules=False when looking up build requirements - where

+     #    it means to accept any module buildrequiring a base module sharing a virtual stream.)

      # 2) The `conf.allow_only_compatible_base_modules` is True and DBResolver is used. This means

      #    that MBS should try to find the compatible modules using its database.

      #    The `get_base_module_mmds` finds out the list of compatible modules and returns mmds of

@@ -11,7 +11,7 @@ 

  from module_build_service.builder.MockModuleBuilder import load_local_builds

  from module_build_service.common import models

  from module_build_service.common.config import conf

- from module_build_service.common.errors import UnprocessableEntity

+ from module_build_service.common.errors import StreamNotXyz, UnprocessableEntity

  from module_build_service.common.models import ModuleBuild

  from module_build_service.common.modulemd import Modulemd

  from module_build_service.common.utils import import_mmd, load_mmd, mmd_to_str
@@ -80,6 +80,37 @@ 

                  "platform:f29.2.0:3:00000000"

              }

  

+     @pytest.mark.parametrize("provide_test_data",

+                              [{"multiple_stream_versions": True}], indirect=True)

+     def test_get_compatible_base_module_modulemds_no_virtual(self, provide_test_data):

+         resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="db")

+ 

+         platform = db_session.query(ModuleBuild).filter_by(name="platform", stream="f29.1.0").one()

+         platform_mmd = platform.mmd()

+ 

+         with pytest.raises(RuntimeError, match=r"Virtual stream list must not be empty"):

+             resolver.get_compatible_base_module_modulemds(

+                 platform_mmd, stream_version_lte=True, virtual_streams=[],

+                 states=[models.BUILD_STATES["ready"]]

+             )

+ 

+     def test_get_compatible_base_module_modulemds_stream_not_xyz(

+             self, require_platform_and_default_arch

+     ):

+         resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="db")

+ 

+         platform = db_session.query(ModuleBuild).filter_by(name="platform", stream="f28").one()

+         platform_mmd = platform.mmd()

+ 

+         with pytest.raises(

+                 StreamNotXyz,

+                 match=(r"Cannot get compatible base modules, because stream "

+                        r"of resolved base module platform:f28 is not in x\.y\.z format")

+         ):

+             resolver.get_compatible_base_module_modulemds(

+                 platform_mmd, stream_version_lte=True, virtual_streams=["f29"],

+                 states=[models.BUILD_STATES["ready"]])

+ 

      @pytest.mark.parametrize("empty_buildrequires", [False, True])

      def test_get_module_build_dependencies(self, empty_buildrequires, reuse_component_init_data):

          """

@@ -2,10 +2,14 @@ 

  # SPDX-License-Identifier: MIT

  from __future__ import absolute_import

  

- from mock import patch, PropertyMock, Mock, call

+ from mock import patch, PropertyMock, Mock

+ import os

+ 

+ import pytest

  

  from module_build_service import app

  from module_build_service.builder.MockModuleBuilder import load_local_builds

+ from module_build_service.common.errors import StreamNotXyz, UnprocessableEntity

  import module_build_service.common.models

  from module_build_service.common import conf

  from module_build_service.common.utils import load_mmd, mmd_to_str
@@ -14,254 +18,347 @@ 

  import tests

  

  

- class TestMBSModule:

-     @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_module_modulemds_nsvc(self, mock_session, testmodule_mmd_9c690d0e):

-         """ Tests for querying a module from mbs """

+ def add_item(test_items, yaml_name=None, mmd=None, context=None, koji_tag='auto'):

+     if mmd is None:

+         modulemd = tests.read_staged_data(yaml_name)

+         mmd = load_mmd(modulemd)

+ 

+     if context:

+         mmd = mmd.copy()

+         mmd.set_context(context)

+ 

+     item = {

+         "name": mmd.get_module_name(),

+         "stream": mmd.get_stream_name(),

+         "version": str(mmd.get_version()),

+         "context": mmd.get_context(),

+         "modulemd": mmd_to_str(mmd),

+         "_mmd": mmd,

+     }

+ 

+     if koji_tag == 'auto':

+         koji_tag = "module-{name}-{stream}-{version}-{context}".format(**item)

+     item["koji_tag"] = koji_tag

+ 

+     test_items.append(item)

+ 

+ 

+ test_items = []

+ add_item(test_items, "formatted_testmodule")

+ add_item(test_items, "formatted_testmodule", context="c2c572ed")

+ add_item(test_items, "platform", koji_tag="module-f28-build")

+ 

+ 

+ ABSENT = object()

+ 

+ 

+ class FakeMBS(object):

+     def __init__(self, session_mock):

+         session_mock.get = self.get

+         self.items = test_items

+         self.request_count = 0

+         self.required_params = {

+             'order_desc_by': 'version',

+             'state': ['ready'],

+             'verbose': True,

+             'per_page': 5,

+         }

+ 

+     def item_matches(self, item, params):

+         for k in ("name", "stream", "version", "context", "koji_tag"):

+             if k in params and item[k] != params[k]:

+                 return False

+ 

+         return True

+ 

+     def modify_item_mmd(self, nsvc, modify):

+         old_items = self.items

+         self.items = []

+         for item in old_items:

+             if item["_mmd"].get_nsvc() == nsvc:

+                 item = dict(item)

+                 mmd = item["_mmd"].copy()

+ 

+                 modify(mmd)

+ 

+                 item["_mmd"] = mmd

+                 item["modulemd"] = mmd_to_str(mmd)

+ 

+             self.items.append(item)

+ 

+     def get(self, url, params={}):

+         self.request_count += 1

+ 

+         for k, v in self.required_params.items():

+             if v == ABSENT:

+                 assert k not in params

+             else:

+                 assert params[k] == v

+ 

+         all_items = [i for i in self.items

+                      if self.item_matches(i, params)]

+ 

+         page = int(params.get('page', 1))

+         per_page = int(params.get('per_page', 5))

+ 

+         result_items = all_items[(page - 1) * per_page:page * per_page]

+         if page * per_page < len(all_items):

+             next_ = ("https://mbs.example.com/module-build-service/1/module-builds/"

+                      "?per_page={}&page={}&verbose=1".format(per_page, page + 1))

+         else:

+             next_ = None

+ 

          mock_res = Mock()

-         mock_res.ok.return_value = True

          mock_res.json.return_value = {

-             "items": [

-                 {

-                     "name": "testmodule",

-                     "stream": "master",

-                     "version": "20180205135154",

-                     "context": "9c690d0e",

-                     "modulemd": testmodule_mmd_9c690d0e,

-                 }

-             ],

-             "meta": {"next": None},

+             "items": result_items,

+             "meta": {"next": next_},

          }

  

-         mock_session.get.return_value = mock_res

+         return mock_res

+ 

+ 

+ class TestMBSModule:

+     def setup_method(self):

+         mbs_resolver.MBSResolver.MBSResolver.region.invalidate()

+ 

+     @pytest.fixture

+     def fake_mbs(self):

+         patcher = patch("module_build_service.resolver.MBSResolver.requests_session")

+         session_mock = patcher.start()

+ 

+         yield FakeMBS(session_mock)

+ 

+         patcher.stop()

+ 

+     @pytest.fixture

+     def local_builds(self, require_empty_database):

+         with patch("module_build_service.common.config.Config.system",

+                    new_callable=PropertyMock,

+                    return_value="test"):

+             with patch("module_build_service.common.config.Config.mock_resultsdir",

+                        new_callable=PropertyMock,

+                        return_value=tests.staged_data_filename("local_builds")):

+                 yield

+ 

+     @pytest.fixture

+     def resolver(self):

+         yield mbs_resolver.GenericResolver.create(db_session, conf, backend="mbs")

+ 

+     def test_get_module_modulemds_nsvc(self, resolver, fake_mbs):

+         """ Tests for querying a module from mbs """

  

-         resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="mbs")

          module_mmds = resolver.get_module_modulemds(

-             "testmodule", "master", "20180205135154", "9c690d0e", virtual_streams=["f28"]

+             "testmodule", "master", "20180205135154", "9c690d0e"

          )

          nsvcs = set(m.get_nsvc() for m in module_mmds)

          expected = {"testmodule:master:20180205135154:9c690d0e"}

-         mbs_url = conf.mbs_url

-         expected_query = {

-             "name": "testmodule",

-             "stream": "master",

-             "version": "20180205135154",

-             "context": "9c690d0e",

-             "verbose": True,

-             "order_desc_by": "version",

-             "page": 1,

-             "per_page": 10,

-             "state": ["ready"],

-             "virtual_stream": ["f28"],

-         }

-         mock_session.get.assert_called_once_with(mbs_url, params=expected_query)

          assert nsvcs == expected

  

-     @patch("module_build_service.resolver.MBSResolver.requests_session")

      def test_get_module_modulemds_partial(

-         self, mock_session, testmodule_mmd_9c690d0e, testmodule_mmd_c2c572ed

+             self, resolver, fake_mbs, formatted_testmodule_mmd

      ):

-         """ Test for querying MBS without the context of a module """

- 

-         version = "20180205135154"

- 

-         mock_res = Mock()

-         mock_res.ok.return_value = True

-         mock_res.json.return_value = {

-             "items": [

-                 {

-                     "name": "testmodule",

-                     "stream": "master",

-                     "version": version,

-                     "context": "9c690d0e",

-                     "modulemd": testmodule_mmd_9c690d0e,

-                 },

-                 {

-                     "name": "testmodule",

-                     "stream": "master",

-                     "version": version,

-                     "context": "c2c572ed",

-                     "modulemd": testmodule_mmd_c2c572ed,

-                 },

-             ],

-             "meta": {"next": None},

-         }

- 

-         mock_session.get.return_value = mock_res

-         resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="mbs")

-         ret = resolver.get_module_modulemds("testmodule", "master", version)

+         """ Test for querying MBS without the version of a module """

+ 

+         fake_mbs.items = []

+ 

+         # First page has version1.context[0..4]

+         # Second page has version1.context5, version2.context[0..3]

+         # Third page has version2.context[4..5]

+         # We should only query the first two pages

+         for i in range(0, 2):

+             for context in range(0, 6):

+                 context_string = "{0:08d}".format(context)

+                 m = formatted_testmodule_mmd.copy()

+                 m.set_version(20180205135154 - i)

+                 m.set_context(context_string)

+                 add_item(fake_mbs.items, mmd=m)

+ 

+         ret = resolver.get_module_modulemds("testmodule", "master")

          nsvcs = set(m.get_nsvc() for m in ret)

          expected = {

-             "testmodule:master:20180205135154:9c690d0e",

-             "testmodule:master:20180205135154:c2c572ed",

-         }

-         mbs_url = conf.mbs_url

-         expected_query = {

-             "name": "testmodule",

-             "stream": "master",

-             "version": version,

-             "verbose": True,

-             "order_desc_by": "version",

-             "page": 1,

-             "per_page": 10,

-             "state": ["ready"],

+             "testmodule:master:20180205135154:00000000",

+             "testmodule:master:20180205135154:00000001",

+             "testmodule:master:20180205135154:00000002",

+             "testmodule:master:20180205135154:00000003",

+             "testmodule:master:20180205135154:00000004",

+             "testmodule:master:20180205135154:00000005",

          }

-         mock_session.get.assert_called_once_with(mbs_url, params=expected_query)

          assert nsvcs == expected

+         assert fake_mbs.request_count == 2

  

-     @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_module_build_dependencies(

-         self, mock_session, platform_mmd, testmodule_mmd_9c690d0e

+     def test_get_module_modulemds_cache(

+             self, resolver, fake_mbs, formatted_testmodule_mmd

      ):

-         """

-         Tests that we return just direct build-time dependencies of testmodule.

-         """

-         mock_res = Mock()

-         mock_res.ok.return_value = True

-         mock_res.json.side_effect = [

-             {

-                 "items": [

-                     {

-                         "name": "testmodule",

-                         "stream": "master",

-                         "version": "20180205135154",

-                         "context": "9c690d0e",

-                         "modulemd": testmodule_mmd_9c690d0e,

-                     }

-                 ],

-                 "meta": {"next": None},

-             },

-             {

-                 "items": [

-                     {

-                         "name": "platform",

-                         "stream": "f28",

-                         "version": "3",

-                         "context": "00000000",

-                         "modulemd": platform_mmd,

-                         "koji_tag": "module-f28-build",

-                     }

-                 ],

-                 "meta": {"next": None},

-             },

-         ]

+         """ Test that query results are cached """

+ 

+         ret1 = resolver.get_module_modulemds("testmodule", "master")

+         ret2 = resolver.get_module_modulemds("testmodule", "master")

+ 

+         assert {m.get_nsvc() for m in ret1} == {m.get_nsvc() for m in ret2}

+         assert fake_mbs.request_count == 1

+ 

+     @pytest.mark.parametrize('strict', (False, True))

+     def test_get_module_modulemds_not_found(self, resolver, fake_mbs, strict):

+         def get_nonexistent():

+             return resolver.get_module_modulemds("testmodule", "master", "0", strict=strict)

+ 

+         if strict:

+             with pytest.raises(UnprocessableEntity):

+                 get_nonexistent()

+         else:

+             assert get_nonexistent() == []

+ 

+     @pytest.mark.parametrize('strict', (False, True))

+     def test_get_module_modulemds_no_yaml(self, resolver, fake_mbs, strict):

+         fake_mbs.items = [dict(i) for i in (fake_mbs.items)]

+         for i in fake_mbs.items:

+             i["modulemd"] = None

+ 

+         def get_modulemds():

+             return resolver.get_module_modulemds(

+                 "testmodule", "master", "20180205135154", strict=strict

+             )

+ 

+         if strict:

+             with pytest.raises(UnprocessableEntity):

+                 get_modulemds()

+         else:

+             assert get_modulemds() == []

+ 

+     def test_get_module_modulemds_local_module(self, resolver, fake_mbs, local_builds):

+         load_local_builds(["platform:f30", "testmodule"])

+ 

+         ret = resolver.get_module_modulemds("testmodule", "master")

+         nsvcs = set(m.get_nsvc() for m in ret)

+         expected = {"testmodule:master:20170816080816:321"}

+         assert nsvcs == expected

  

-         mock_session.get.return_value = mock_res

+     def test_get_module_build_dependencies(self, resolver, fake_mbs):

          expected = {"module-f28-build"}

-         resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="mbs")

          result = resolver.get_module_build_dependencies(

              "testmodule", "master", "20180205135154", "9c690d0e").keys()

  

-         expected_queries = [

-             {

-                 "name": "testmodule",

-                 "stream": "master",

-                 "version": "20180205135154",

-                 "context": "9c690d0e",

-                 "verbose": True,

-                 "order_desc_by": "version",

-                 "page": 1,

-                 "per_page": 10,

-                 "state": ["ready"],

-             },

-             {

-                 "name": "platform",

-                 "stream": "f28",

-                 "version": "3",

-                 "context": "00000000",

-                 "verbose": True,

-                 "order_desc_by": "version",

-                 "page": 1,

-                 "per_page": 10,

-                 "state": ["ready"],

-             },

-         ]

- 

-         mbs_url = conf.mbs_url

-         expected_calls = [

-             call(mbs_url, params=expected_queries[0]),

-             call(mbs_url, params=expected_queries[1]),

-         ]

-         mock_session.get.mock_calls = expected_calls

-         assert mock_session.get.call_count == 2

          assert set(result) == expected

  

-     @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_module_build_dependencies_empty_buildrequires(

-         self, mock_session, testmodule_mmd_9c690d0e

+     def test_get_module_build_dependencies_from_mmd(

+             self, resolver, fake_mbs, formatted_testmodule_mmd, platform_mmd

      ):

+         loaded_platform_mmd = load_mmd(platform_mmd)

+ 

+         fake_mbs.items = []

+         add_item(

+             fake_mbs.items,

+             mmd=loaded_platform_mmd,

+             koji_tag='module-f28-build'

+         )

+         # Test that duplicated koji tags are ignored

+         add_item(

+             fake_mbs.items,

+             mmd=loaded_platform_mmd.copy('platform2', 'f28'),

+             koji_tag='module-f28-build'

+         )

+         # Test that modules without koji tags (metadata modules) are ignored

+         add_item(

+             fake_mbs.items,

+             mmd=loaded_platform_mmd.copy('metadata', 'f28'),

+             koji_tag=None

+         )

  

-         mmd = load_mmd(testmodule_mmd_9c690d0e)

-         # Wipe out the dependencies

-         for deps in mmd.get_dependencies():

-             mmd.remove_dependencies(deps)

+         mmd = formatted_testmodule_mmd.copy()

          xmd = mmd.get_xmd()

-         xmd["mbs"]["buildrequires"] = {}

+         xmd["mbs"]["buildrequires"].update({

+             'platform2': {

+                 'name': 'platform2',

+                 'stream': 'f28',

+                 'version': '3',

+                 'context': '00000000',

+             },

+             'metadata': {

+                 'name': 'metadata',

+                 'stream': 'f28',

+                 'version': '3',

+                 'context': '00000000',

+             },

+         })

          mmd.set_xmd(xmd)

  

-         mock_res = Mock()

-         mock_res.ok.return_value = True

-         mock_res.json.side_effect = [

-             {

-                 "items": [

-                     {

-                         "name": "testmodule",

-                         "stream": "master",

-                         "version": "20180205135154",

-                         "context": "9c690d0e",

-                         "modulemd": mmd_to_str(mmd),

-                         "build_deps": [],

-                     }

-                 ],

-                 "meta": {"next": None},

-             }

-         ]

+         add_item(fake_mbs.items, mmd=mmd, koji_tag=None)

  

-         mock_session.get.return_value = mock_res

+         expected = {"module-f28-build"}

+         result = resolver.get_module_build_dependencies(mmd=mmd)

+ 

+         assert set(result) == expected

+ 

+     def test_get_module_build_dependencies_local_module(

+             self, resolver, fake_mbs, formatted_testmodule_mmd, local_builds

+     ):

+         load_local_builds(["platform:f28"])

+ 

+         results = list(resolver.get_module_build_dependencies(mmd=formatted_testmodule_mmd).keys())

+         assert len(results) == 1

+         result = results[0]

+ 

+         assert os.path.isabs(result)

+         assert result.endswith('/staged_data/local_builds/module-platform-f28-3/results')

+ 

+     def test_get_module_build_dependencies_missing_version(

+             self, resolver, fake_mbs, formatted_testmodule_mmd

+     ):

+         with pytest.raises(RuntimeError,

+                            match=r"The name, stream, version, and/or context weren't specified"):

+             resolver.get_module_build_dependencies("testmodule", "master", None, "9c690d0e")

  

+     def test_get_module_build_dependencies_bad_mmd(self, resolver, fake_mbs):

+         fake_mbs.items = [dict(i) for i in (fake_mbs.items)]

+         for i in fake_mbs.items:

+             i["modulemd"] = None

+ 

+         with pytest.raises(RuntimeError,

+                            match=(r'The module "{.*}" did not contain its modulemd '

+                                   r'or did not have its xmd attribute filled out in MBS')):

+             resolver.get_module_build_dependencies(

+                 "testmodule", "master", "20180205135154", "9c690d0e")

+ 

+     def test_get_module_build_dependencies_empty_buildrequires(

+         self, resolver, fake_mbs, local_builds

+     ):

+         def modify(mmd):

+             # Wipe out the dependencies

+             for deps in mmd.get_dependencies():

+                 mmd.remove_dependencies(deps)

+             xmd = mmd.get_xmd()

+             xmd["mbs"]["buildrequires"] = {}

+             mmd.set_xmd(xmd)

+ 

+         fake_mbs.modify_item_mmd('testmodule:master:20180205135154:9c690d0e', modify)

          expected = set()

  

          resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="mbs")

          result = resolver.get_module_build_dependencies(

              "testmodule", "master", "20180205135154", "9c690d0e"

          ).keys()

-         mbs_url = conf.mbs_url

-         expected_query = {

-             "name": "testmodule",

-             "stream": "master",

-             "version": "20180205135154",

-             "context": "9c690d0e",

-             "verbose": True,

-             "order_desc_by": "version",

-             "page": 1,

-             "per_page": 10,

-             "state": ["ready"],

-         }

-         mock_session.get.assert_called_once_with(mbs_url, params=expected_query)

          assert set(result) == expected

  

-     @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_resolve_profiles(

-         self, mock_session, formatted_testmodule_mmd, platform_mmd

+     def test_get_module_build_dependencies_no_context(

+         self, resolver, fake_mbs, local_builds

      ):

+         def modify(mmd):

+             xmd = mmd.get_xmd()

+             for name, details in xmd["mbs"]["buildrequires"].items():

+                 # Should be treated as 00000000

+                 del details["context"]

+             mmd.set_xmd(xmd)

+ 

+         fake_mbs.modify_item_mmd('testmodule:master:20180205135154:9c690d0e', modify)

+         expected = {"module-f28-build"}

  

-         mock_res = Mock()

-         mock_res.ok.return_value = True

-         mock_res.json.return_value = {

-             "items": [

-                 {

-                     "name": "platform",

-                     "stream": "f28",

-                     "version": "3",

-                     "context": "00000000",

-                     "modulemd": platform_mmd,

-                 }

-             ],

-             "meta": {"next": None},

-         }

- 

-         mock_session.get.return_value = mock_res

          resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="mbs")

+         result = resolver.get_module_build_dependencies(

+             "testmodule", "master", "20180205135154", "9c690d0e"

+         ).keys()

+         assert set(result) == expected

+ 

+     def test_resolve_profiles(self, resolver, fake_mbs, formatted_testmodule_mmd):

          result = resolver.resolve_profiles(

              formatted_testmodule_mmd, ("buildroot", "srpm-buildroot")

          )
@@ -303,49 +400,82 @@ 

              },

          }

  

-         mbs_url = conf.mbs_url

-         expected_query = {

-             "name": "platform",

-             "stream": "f28",

-             "version": "3",

-             "context": "00000000",

-             "verbose": True,

-             "order_desc_by": "version",

-             "page": 1,

-             "per_page": 10,

-             "state": ["ready"],

-         }

- 

-         mock_session.get.assert_called_once_with(mbs_url, params=expected_query)

          assert result == expected

  

-     @patch(

-         "module_build_service.common.config.Config.system",

-         new_callable=PropertyMock,

-         return_value="test",

-     )

-     @patch(

-         "module_build_service.common.config.Config.mock_resultsdir",

-         new_callable=PropertyMock,

-         return_value=tests.staged_data_filename("local_builds")

-     )

      def test_resolve_profiles_local_module(

-         self, local_builds, conf_system, formatted_testmodule_mmd, require_empty_database

+             self, resolver, local_builds, formatted_testmodule_mmd

      ):

          load_local_builds(["platform:f28"])

  

-         resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="mbs")

          result = resolver.resolve_profiles(

              formatted_testmodule_mmd, ("buildroot", "srpm-buildroot"))

          expected = {"buildroot": {"foo"}, "srpm-buildroot": {"bar"}}

          assert result == expected

  

-     @patch("module_build_service.resolver.MBSResolver.requests_session")

-     def test_get_empty_buildrequired_modulemds(self, request_session):

-         resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="mbs")

-         request_session.get.return_value = Mock(ok=True)

-         request_session.get.return_value.json.return_value = {"items": [], "meta": {"next": None}}

+     def test_get_compatible_base_module_modulemds(

+             self, resolver, fake_mbs, formatted_testmodule_mmd

+     ):

+         mmd = formatted_testmodule_mmd.copy('testmodule', 'f28.1.0')

+ 

+         fake_mbs.required_params.update({

+             'state': ['garbage', 'ready'],

+             'stream_version_lte': 280100,

+             'virtual_stream': ['f28'],

+         })

+         resolver.get_compatible_base_module_modulemds(mmd, True,

+                                                       ['f28'], ['ready', 'garbage'])

+ 

+     def test_get_compatible_base_module_modulemds_no_stream_version_lte(

+             self, resolver, fake_mbs, formatted_testmodule_mmd

+     ):

+         mmd = formatted_testmodule_mmd.copy('testmodule', 'f28.1.0')

+ 

+         fake_mbs.required_params.update({

+             'state': ['garbage', 'ready'],

+             'stream_version_lte': ABSENT,

+             'virtual_stream': ['f28'],

+         })

+         resolver.get_compatible_base_module_modulemds(mmd, False,

+                                                       ['f28'], ['ready', 'garbage'])

+ 

+     @pytest.mark.parametrize('states,canonical_states', [

+         ([module_build_service.common.models.BUILD_STATES['ready']], ['ready']),

+         (None, ['ready']),

+         (['ready', 'garbage'], ['garbage', 'ready']),

+     ])

+     def test_get_compatible_base_module_modulemds_canonicalize_state(

+             self, resolver, fake_mbs, formatted_testmodule_mmd, states, canonical_states

+     ):

+         mmd = formatted_testmodule_mmd.copy('testmodule', 'f28.1.0')

  

+         fake_mbs.required_params.update({

+             'state': canonical_states,

+             'stream_version_lte': 280100,

+             'virtual_stream': ['f28'],

+         })

+         resolver.get_compatible_base_module_modulemds(mmd, True, ['f28'], states)

+ 

+     def test_get_compatible_base_module_modulemds_no_virtual_stream(

+             self, resolver, fake_mbs, formatted_testmodule_mmd

+     ):

+         mmd = formatted_testmodule_mmd.copy('testmodule', 'f28.1.0')

+         with pytest.raises(RuntimeError, match=r"Virtual stream list must not be empty"):

+             resolver.get_compatible_base_module_modulemds(mmd, True,

+                                                           [], ['ready'])

+ 

+     def test_get_compatible_base_module_modulemds_stream_not_xyz(

+             self, resolver, fake_mbs, formatted_testmodule_mmd

+     ):

+         mmd = formatted_testmodule_mmd.copy('testmodule', 'f28')

+         with pytest.raises(

+                 StreamNotXyz,

+                 match=(r"Cannot get compatible base modules, because stream "

+                        r"of resolved base module testmodule:f28 is not in x\.y\.z format")

+         ):

+             resolver.get_compatible_base_module_modulemds(mmd, True,

+                                                           ['f28'], ['ready'])

+ 

+     def test_get_empty_buildrequired_modulemds(self, resolver, fake_mbs):

          platform = db_session.query(

              module_build_service.common.models.ModuleBuild).filter_by(id=1).one()

          result = resolver.get_buildrequired_modulemds("nodejs", "10", platform.mmd())
@@ -390,10 +520,43 @@ 

          assert 1 == mmd.get_version()

          assert "c1" == mmd.get_context()

  

+     @pytest.mark.parametrize('strict', (False, True))

+     def test_get_module(self, resolver, fake_mbs, strict):

+         module = resolver.get_module(

+             "testmodule", "master", "20180205135154", "9c690d0e", strict=strict

+         )

+         assert module["version"] == "20180205135154"

+ 

+         if strict:

+             with pytest.raises(UnprocessableEntity):

+                 module = resolver.get_module(

+                     "testmodule", "master", "12345", "9c690d0e", strict=strict

+                 )

+         else:

+             module = resolver.get_module(

+                 "testmodule", "master", "12345", "9c690d0e", strict=strict

+             )

+             assert module is None

+ 

+     def test_get_module_cache(self, resolver, fake_mbs):

+         for i in range(0, 2):

+             resolver.get_module(

+                 "testmodule", "master", "20180205135154", "9c690d0e"

+             )

+         assert fake_mbs.request_count == 1

+ 

+     def test_get_module_precache(self, resolver, fake_mbs):

+         mmd = resolver.get_module_modulemds("testmodule", "master")[0]

+         module = resolver.get_module(

+             mmd.get_module_name(), mmd.get_stream_name(),

+             mmd.get_version(), mmd.get_context()

+         )

+         assert int(module["version"]) == mmd.get_version()

+         assert fake_mbs.request_count == 1

+ 

      @patch("module_build_service.resolver.MBSResolver.requests_session")

      def test_get_module_count(self, mock_session):

          mock_res = Mock()

-         mock_res.ok.return_value = True

          mock_res.json.return_value = {

              "items": [{"name": "platform", "stream": "f28", "version": "3", "context": "00000000"}],

              "meta": {"total": 5},
@@ -412,7 +575,6 @@ 

      @patch("module_build_service.resolver.MBSResolver.requests_session")

      def test_get_latest_with_virtual_stream(self, mock_session, platform_mmd):

          mock_res = Mock()

-         mock_res.ok.return_value = True

          mock_res.json.return_value = {

              "items": [

                  {
@@ -443,19 +605,7 @@ 

              },

          )

  

-     @patch(

-         "module_build_service.common.config.Config.system",

-         new_callable=PropertyMock,

-         return_value="test",

-     )

-     @patch(

-         "module_build_service.common.config.Config.mock_resultsdir",

-         new_callable=PropertyMock,

-         return_value=tests.staged_data_filename("local_builds")

-     )

-     def test_get_buildrequired_modulemds_local_builds(

-         self, local_builds, conf_system, require_empty_database

-     ):

+     def test_get_buildrequired_modulemds_local_builds(self, local_builds):

          with app.app_context():

              load_local_builds(["testmodule"])

  
@@ -514,3 +664,56 @@ 

          assert "10" == mmd.get_stream_name()

          assert 2 == mmd.get_version()

          assert "c1" == mmd.get_context()

+ 

+     def test_resolve_requires(self, resolver, fake_mbs):

+         result = resolver.resolve_requires(["platform:f28:3:00000000", "testmodule:master"])

+         result_skeleton = {

+             k: "{stream}:{version}:{context}".format(**v) for k, v in result.items()

+         }

+ 

+         assert result_skeleton == {

+             "platform": "f28:3:00000000",

+             "testmodule": "master:20180205135154:9c690d0e",

+         }

+ 

+     def test_resolve_requires_bad_nsvc(self, resolver, fake_mbs):

+         with pytest.raises(

+                 ValueError,

+                 match=r"Only N:S or N:S:V:C is accepted by resolve_requires, got platform"):

+             resolver.resolve_requires(["platform"])

+ 

+     def test_resolve_requires_no_commit_hash(self, resolver, fake_mbs):

+         def modify(mmd):

+             xmd = mmd.get_xmd()

+             del xmd["mbs"]["commit"]

+             mmd.set_xmd(xmd)

+ 

+         fake_mbs.modify_item_mmd('testmodule:master:20180205135154:9c690d0e', modify)

+ 

+         with pytest.raises(

+                 RuntimeError,

+                 match=(r"The module \"testmodule\" didn't contain "

+                        r"both a commit hash and a version in MBS")):

+             resolver.resolve_requires(["testmodule:master"])

+ 

+     def test_resolve_requires_local_builds(self, resolver, local_builds, fake_mbs):

+         load_local_builds(["platform:f30", "testmodule"])

+ 

+         result = resolver.resolve_requires(["testmodule:master"])

+         result_skeleton = {

+             k: "{stream}:{version}:{context}".format(**v) for k, v in result.items()

+         }

+ 

+         assert result_skeleton == {

+             "testmodule": "master:20170816080816:321"

+         }

+ 

+     def test_get_modulemd_by_koji_tag(self, resolver, fake_mbs):

+         fake_mbs.required_params = {

+             'verbose': True

+         }

+         mmd = resolver.get_modulemd_by_koji_tag("module-testmodule-master-20180205135154-9c690d0e")

+         assert mmd.get_nsvc() == "testmodule:master:20180205135154:9c690d0e"

+ 

+         mmd = resolver.get_modulemd_by_koji_tag("module-testmodule-master-1-1")

+         assert mmd is None

@@ -90,6 +90,13 @@ 

          default_modules.add_default_modules(mmd)

  

  

+ # The fedora-style base module versioning here, with f27/f28 sharing the same

+ # virtual_stream doesn't work with allow_only_compatible_base_modules, since

+ # that expects FOOx.y.z versioning.

+ @patch(

+     "module_build_service.common.config.Config.allow_only_compatible_base_modules",

+     new=False

+ )

  @patch("module_build_service.scheduler.default_modules._get_default_modules")

  def test_add_default_modules_compatible_platforms(mock_get_dm, require_empty_database):

      """

@@ -2,11 +2,14 @@ 

  # SPDX-License-Identifier: MIT

  from __future__ import absolute_import

  

+ from datetime import timedelta

+ 

  import mock

  import pytest

  from sqlalchemy.orm.session import make_transient

  

  from module_build_service.common import models

+ from module_build_service.common.errors import StreamNotXyz

  from module_build_service.common.modulemd import Modulemd

  from module_build_service.common.utils import import_mmd, load_mmd, mmd_to_str

  from module_build_service.scheduler.db_session import db_session
@@ -293,80 +296,129 @@ 

          assert reusable_module.id == build_module.reused_module_id

          assert reusable_module.id == reused_module.id

  

-     @pytest.mark.parametrize("allow_ocbm", (True, False))

+     _EXCEPTION = object()

+ 

+     @pytest.mark.parametrize("allow_ocbm,build_req,available_req,expected", [

+         # When allow_only_compatible_base_modules is false, only modules

+         # built against the exact same base module stream can be used

+         (False, "f29.2.0", ("f29.2.0",), "f29.2.0"),

+         (False, "f29.2.0", ("f29.0.0",), None),

+         # When it is true, any base module x.y2.z2 where y2.z2 <= y1.z1

+         # is a candidate

+         (True, "f29.2.0", ("f29.2.0", "f29.0.0", "f29.3.0"), "f29.2.0"),

+         (True, "f29.2.0", ("f29.1.0", "f29.0.0", "f29.3.0"), "f29.1.0"),

+         # But if the major is different, they don't (the code below adds "f28.0.0"

+         # with the "f29" virtual stream, so it would be a candidate otherwise)

+         (True, "f29.2.0", ("f28.0.0",), None),

+         # the virtual stream must also match (+ is used to skip virtual stream addition)

+         (True, "f29.2.0", ("+f29.1.0",), None),

+         # If the buildrequired base module isn't in x.y.z form, we raise an

+         # exception

+         (True, "f29", ("f29",), _EXCEPTION),

+         (True, "f29", ("f29.0.0",), _EXCEPTION),

+     ])

      @mock.patch(

          "module_build_service.common.config.Config.allow_only_compatible_base_modules",

          new_callable=mock.PropertyMock,

      )

-     def test_get_reusable_module_use_latest_build(self, cfg, allow_ocbm):

+     def test_get_reusable_module_use_latest_build(

+             self, cfg, allow_ocbm, build_req, available_req, expected):

          """

          Test that the `get_reusable_module` tries to reuse the latest module in case when

          multiple modules can be reused allow_only_compatible_base_modules is True.

          """

          cfg.return_value = allow_ocbm

-         # Set "fedora" virtual stream to platform:f28.

-         platform_f28 = db_session.query(models.ModuleBuild).filter_by(name="platform").one()

-         mmd = platform_f28.mmd()

-         xmd = mmd.get_xmd()

-         xmd["mbs"]["virtual_streams"] = ["fedora"]

-         mmd.set_xmd(xmd)

-         platform_f28.modulemd = mmd_to_str(mmd)

-         platform_f28.update_virtual_streams(db_session, ["fedora"])

  

-         # Create platform:f29 with "fedora" virtual stream.

-         mmd = load_mmd(read_staged_data("platform"))

-         mmd = mmd.copy("platform", "f29")

-         xmd = mmd.get_xmd()

-         xmd["mbs"]["virtual_streams"] = ["fedora"]

-         mmd.set_xmd(xmd)

-         platform_f29 = import_mmd(db_session, mmd)[0]

+         # Create all the platform streams we need

+         needed_platform_streams = set([build_req])

+         needed_platform_streams.update(available_req)

+         platform_modules = {}

+         for stream in needed_platform_streams:

+             skip_virtual = False

+             if stream.startswith('+'):

+                 skip_virtual = True

+                 stream = stream[1:]

+ 

+             # Create platform:f29.0.0 with "f29" virtual stream.

+             mmd = load_mmd(read_staged_data("platform"))

+             mmd = mmd.copy("platform", stream)

+             xmd = mmd.get_xmd()

+             xmd["mbs"]["virtual_streams"] = [] if skip_virtual else ["f29"]

+             mmd.set_xmd(xmd)

+             platform_modules[stream] = import_mmd(db_session, mmd)[0]

+ 

+         # Modify a module to buildrequire a particular platform stream

+         def modify_buildrequires(module, platform_module):

+             mmd = module.mmd()

+             deps = mmd.get_dependencies()[0]

+             deps.clear_buildtime_dependencies()

+             deps.add_buildtime_stream('platform', platform_module.stream)

+             xmd = mmd.get_xmd()

+             xmd["mbs"]["buildrequires"]["platform"]["stream"] = platform_module.stream

+             mmd.set_xmd(xmd)

+             module.modulemd = mmd_to_str(mmd)

+             contexts = models.ModuleBuild.contexts_from_mmd(

+                 module.modulemd

+             )

+             module.build_context = contexts.build_context

+             module.context = contexts.context

+             module.buildrequires = [platform_module]

+ 

+         stream_to_testmodule_id = {}

+ 

+         first = True

+         for stream in available_req:

+             if stream.startswith('+'):

+                 stream = stream[1:]

+ 

+             if first:

+                 # Reuse the testmodule:master already in the database

+                 module = db_session.query(models.ModuleBuild).filter_by(

+                     name="testmodule", state=models.BUILD_STATES["ready"]).one()

+ 

+                 modify_buildrequires(module, platform_modules[stream])

+                 time_completed = module.time_completed

+ 

+                 first = False

+             else:

+                 # Create another copy of `testmodule:master`, and modify it

+                 module = db_session.query(models.ModuleBuild).filter_by(

+                     name="testmodule", state=models.BUILD_STATES["ready"]).first()

+ 

+                 # This is used to clone the ModuleBuild SQLAlchemy object without recreating it from

+                 # scratch.

+                 db_session.expunge(module)

+                 make_transient(module)

+ 

+                 modify_buildrequires(module, platform_modules[stream])

+ 

+                 # Add modules with ascending time_completed

+                 time_completed += timedelta(days=1)

+                 module.time_completed = time_completed

+ 

+                 # Set the `id` to None, so new one is generated by SQLAlchemy.

+                 module.id = None

+                 db_session.add(module)

  

-         # Create another copy of `testmodule:master` which should be reused, because its

-         # stream version will be higher than the previous one. Also set its buildrequires

-         # to platform:f29.

-         latest_module = db_session.query(models.ModuleBuild).filter_by(

-             name="testmodule", state=models.BUILD_STATES["ready"]).one()

-         # This is used to clone the ModuleBuild SQLAlchemy object without recreating it from

-         # scratch.

-         db_session.expunge(latest_module)

-         make_transient(latest_module)

- 

-         # Change the platform:f28 buildrequirement to platform:f29 and recompute the build_context.

-         mmd = latest_module.mmd()

-         xmd = mmd.get_xmd()

-         xmd["mbs"]["buildrequires"]["platform"]["stream"] = "f29"

-         mmd.set_xmd(xmd)

-         latest_module.modulemd = mmd_to_str(mmd)

-         contexts = models.ModuleBuild.contexts_from_mmd(

-             latest_module.modulemd

-         )

-         latest_module.build_context = contexts.build_context

-         latest_module.context = contexts.context

-         latest_module.buildrequires = [platform_f29]

- 

-         # Set the `id` to None, so new one is generated by SQLAlchemy.

-         latest_module.id = None

-         db_session.add(latest_module)

-         db_session.commit()

+             db_session.commit()

+             stream_to_testmodule_id[stream] = module.id

  

          module = db_session.query(models.ModuleBuild)\

                             .filter_by(name="testmodule")\

                             .filter_by(state=models.BUILD_STATES["build"])\

                             .one()

+         modify_buildrequires(module, platform_modules[build_req])

          db_session.commit()

  

-         reusable_module = get_reusable_module(module)

- 

-         if allow_ocbm:

-             assert reusable_module.id == latest_module.id

+         if expected is TestUtilsModuleReuse._EXCEPTION:

+             with pytest.raises(StreamNotXyz):

+                 get_reusable_module(module)

          else:

-             # There are two testmodules in ready state, the first one with

-             # lower id is what we want.

-             first_module = db_session.query(models.ModuleBuild).filter_by(

-                 name="testmodule", state=models.BUILD_STATES["ready"]

-             ).order_by(models.ModuleBuild.id).first()

- 

-             assert reusable_module.id == first_module.id

+             reusable_module = get_reusable_module(module)

+             if expected:

+                 assert reusable_module.id == stream_to_testmodule_id[expected]

+             else:

+                 assert reusable_module is None

  

      @pytest.mark.parametrize("allow_ocbm", (True, False))

      @mock.patch(

The goal of this is to speed up MBSResolver - local builds were spending around 10 seconds waiting for repeated calls to MBS before they started doing anything. So I tweaked the API calls and added caching. To check that was OK, I spent some time getting MBSResolver up to 100% test coverage, which revealed some small weirdnesses/bugs/dead code, also fixed in this PR. In other words, the last two commits are the interesting part; the rest is somewhere between yak shaving and a rathole.
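
As a rough sketch of the kind of caching described here (not the PR's actual MBSResolver code - the class and the _query_mbs helper below are hypothetical), the idea is to memoize module lookups by NSVC so repeated queries only hit MBS once:

    # Minimal sketch, assuming a hypothetical _query_mbs() stand-in for the
    # real HTTP request; the PR's actual implementation may differ.
    class CachingResolver(object):
        def __init__(self):
            # (name, stream, version, context) -> module dict returned by MBS
            self._module_cache = {}

        def get_module(self, name, stream, version, context):
            key = (name, stream, str(version), context)
            if key not in self._module_cache:
                self._module_cache[key] = self._query_mbs(*key)
            return self._module_cache[key]

        def _query_mbs(self, name, stream, version, context):
            raise NotImplementedError("stand-in for the real MBS query")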

Build a661c1be3a52753b83c82e9f674138332371feba FAILED!
Rebase or make new commits to rebuild.

Just wanted to mention that I found some issues with the patch in here: "Tweak behavior of get_compatible_base_module_modulemds" - so it's not worth trying to sort through that one yet. I'll do a new version later today or tomorrow.

rebased onto 2983536b16c6167118655e85b6e4655cff2eae21

3 years ago

OK, pushed a new version that continues the yak-shaving / rathole-diving around allow_only_compatible_base_modules with a stream version that isn't of the form <stream>x.y.z - this new version has documentation and extra checks to try to keep both maintainers and users in a less confused state.

This should be ready for review now.
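
For context, a rough sketch of the <stream>x.y.z handling that the new tests exercise (e.g. stream_version_lte=280100 for "f28.1.0", StreamNotXyz for "f28"); the helper name and exact behavior below are assumptions, not MBS's real API:

    import re

    # Streams must look like <prefix>x.y.z to get a numeric stream version.
    _XYZ_RE = re.compile(r"^\D*(\d+)\.(\d+)\.(\d+)$")

    def stream_version_or_none(stream):
        """Return e.g. 280100 for 'f28.1.0', or None if the stream is not x.y.z."""
        match = _XYZ_RE.match(stream)
        if not match:
            return None
        x, y, z = (int(part) for part in match.groups())
        return x * 10000 + y * 100 + z

    assert stream_version_or_none("f28.1.0") == 280100
    assert stream_version_or_none("f28") is None  # MBS would raise StreamNotXyz here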

Build f2b42ff5d1223679bd6d25a2478966d0f6c0f36e FAILED!
Rebase or make new commits to rebuild.

Why would the int() indicate a problem here?

I think that was meant to be a reminder to me to double-check, not that I actually thought there was a problem. Double-checking: ModuleBuild.version is a string, but Modulemd.ModuleStream.get_version() returns a uint64, so this cast is what you would expect. (Ideally, I suppose, MBS and libmodulemd would be on the same page about the data type, but since the MBS data type is part of the JSON API, it's nothing that is going to change.) I'll remove the FIXME along with whatever other changes are needed.
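
A tiny illustration of the mismatch (the values are examples, not real data):

    # The MBS JSON API reports "version" as a string, while libmodulemd's
    # ModuleStream.get_version() returns an integer (uint64), hence the int() cast.
    api_module = {"version": "20180205135154"}   # as returned by the MBS JSON API
    mmd_version = 20180205135154                 # as returned by get_version()
    assert int(api_module["version"]) == mmd_version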

The caching and efficiency improvements make sense - overall LGTM. Once this is rebased I'll give it another look over and merge.

Commit https://pagure.io/fork/otaylor/fm-orchestrator/c/2983536b16c6167118655e85b6e4655cff2eae21 is a fix for #1574, which was subsequently fixed by PR#1715. It's not significantly better (it presents the information differently and adds an extra test case), so I'll drop it on the rebase.

rebased onto e489831

2 years ago

Commit 6b76877 fixes this pull-request

Pull-Request has been merged by breilly

2 years ago
