#1030 Resolve stream collision with modules added to ursine content
Merged 5 months ago by mprahl. Opened 7 months ago by cqi.
cqi/fm-orchestrator stream-collision  into  master

@@ -41,8 +41,10 @@ 

      import xmlrpc.client as xmlrpclib

  

  import munch

+ from itertools import chain

  from OpenSSL.SSL import SysCallError

  

+ 

  from module_build_service import log, conf, models

  import module_build_service.scm

  import module_build_service.utils

@@ -288,6 +290,12 @@ 

          return filtered_rpms

  

      @staticmethod

+     def format_conflicts_line(nevr):

+         """Helper method to format a Conflicts line with an RPM N-E:V-R"""

+         parsed_nvr = kobo.rpmlib.parse_nvr(nevr)

+         return "Conflicts: {name} = {epoch}:{version}-{release}".format(**parsed_nvr)

+ 

+     @staticmethod

      def get_disttag_srpm(disttag, module_build):

  

          # Taken from Karsten's create-distmacro-pkg.sh

@@ -304,22 +312,29 @@ 

          # build-requires based on their "mmd.filter.rpms". So we set the

          # module-build-macros to conflict with these filtered RPMs to ensure

          # they won't be installed to buildroot.

-         filter_conflicts = ""

+         filter_conflicts = []

          for req_name, req_data in mmd.get_xmd()["mbs"]["buildrequires"].items():

              if req_data["filtered_rpms"]:

-                 filter_conflicts += "# Filtered rpms from %s module:\n" % (

-                     req_name)

+                 filter_conflicts.append("# Filtered rpms from %s module:" % req_name)

              # Check if the module depends on itself

              if req_name == module_build.name:

                  filtered_rpms = KojiModuleBuilder._get_filtered_rpms_on_self_dep(

                      module_build, req_data["filtered_rpms"])

              else:

                  filtered_rpms = req_data["filtered_rpms"]

-             for nvr in filtered_rpms:

-                 parsed_nvr = kobo.rpmlib.parse_nvr(nvr)

-                 filter_conflicts += "Conflicts: %s = %s:%s-%s\n" % (

-                     parsed_nvr["name"], parsed_nvr["epoch"],

-                     parsed_nvr["version"], parsed_nvr["release"])

+             filter_conflicts.extend(map(

+                 KojiModuleBuilder.format_conflicts_line, filtered_rpms))

+ 

+             if req_name in conf.base_module_names and 'ursine_rpms' in req_data:

+                 comments = (

+                     '# Filter out RPMs from stream collision modules found from ursine content'

+                     ' for base module {}:'.format(req_name),

+                     '# ' + ', '.join(req_data['stream_collision_modules']),

+                 )

+                 filter_conflicts.extend(chain(

+                     comments,

+                     map(KojiModuleBuilder.format_conflicts_line, req_data['ursine_rpms'])

+                 ))

  

          spec_content = """

  %global dist {disttag}

@@ -373,7 +388,7 @@ 

             module_stream=module_build.stream,

             module_version=module_build.version,

             module_context=module_build.context,

-            filter_conflicts=filter_conflicts)

+            filter_conflicts='\n'.join(filter_conflicts))

  

          modulemd_macros = ""

          rpm_buildopts = mmd.get_rpm_buildopts()
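For illustration, a minimal sketch of what the new helper produces, assuming kobo.rpmlib is importable and using an NEVR taken from the test data added below:

    import kobo.rpmlib

    # Hypothetical filtered RPM, taken from the test data rather than a real build.
    nevr = 'baz-devel-0:0.1-6.fc28'
    parsed = kobo.rpmlib.parse_nvr(nevr)
    # parsed contains the keys 'name', 'epoch', 'version' and 'release'.
    line = "Conflicts: {name} = {epoch}:{version}-{release}".format(**parsed)
    # line == 'Conflicts: baz-devel = 0:0.1-6.fc28'

Every such line, for both filtered and ursine RPMs, is appended to the filter_conflicts list and joined with newlines before being substituted into the module-build-macros spec template.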

@@ -477,6 +477,10 @@ 

              'default': 'db',

              'desc': 'Where to look up for modules. Note that this can (and '

                      'probably will) be builder-specific.'},

+         'koji_external_repo_url_prefix': {

+             'type': str,

+             'default': 'https://kojipkgs.fedoraproject.org/',

+             'desc': 'URL prefix of base module\'s external repo.'},

      }

  

      def __init__(self, conf_section_obj):
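A rough sketch of how this new option is consumed, mirroring the regular expression added to the ursine utility later in this PR (the repo URL is made up for illustration):

    import re

    koji_external_repo_url_prefix = 'https://kojipkgs.fedoraproject.org/'
    pattern = r'^{}/repos/(.+-build)/latest/\$arch/?$'.format(
        koji_external_repo_url_prefix.rstrip('/'))

    url = 'https://kojipkgs.fedoraproject.org/repos/f28-build/latest/$arch/'
    match = re.match(pattern, url)
    # match.groups()[0] == 'f28-build', the guessed build tag name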

@@ -366,6 +366,11 @@ 

                  sqlalchemy.cast(ModuleBuild.version, db.BigInteger) == subq.c.maxversion))

          return query.all()

  

+     @staticmethod

+     def get_build_by_koji_tag(session, tag):

+         """Get build by its koji_tag"""

+         return session.query(ModuleBuild).filter_by(koji_tag=tag).first()

+ 

      def mmd(self):

          try:

              mmd = Modulemd.Module().new_from_string(self.modulemd)

@@ -40,6 +40,15 @@ 

      def __init__(self, config):

          self.config = config

  

+     def _get_module(self, name, stream, version, context, strict=False):

@cqi, my comment from below is still open. Could you please explain why you created this method instead of reusing get_module_modulemds?

+         with models.make_session(self.config) as session:

+             mb = models.ModuleBuild.get_build_from_nsvc(

+                 session, name, stream, version, context)

+             if mb is None and strict:

+                 raise UnprocessableEntity(

+                     'Cannot find any module builds for %s:%s' % (name, stream))

Since all the parameters are required, why doesn't the error message say 'Cannot find the module build "{}:{}:{}:{}"'.format(name, stream, version, context)?

+             return mb.extended_json()

+ 

      def get_module_modulemds(self, name, stream, version=None, context=None, strict=False,

                               stream_version_lte=False):

          """

@@ -57,11 +66,12 @@ 

              less than or equal the stream version computed from `stream`.

          :return: List of Modulemd metadata instances matching the query

          """

+         if version and context:

+             mmd = self._get_module(name, stream, version, context, strict=strict)

+             return [self.extract_modulemd(mmd['modulemd'])]

If all you're doing is getting the modulemd file, why not just reuse get_module_modulemds instead of creating _get_module?

+ 

          with models.make_session(self.config) as session:

-             if version and context:

-                 builds = [models.ModuleBuild.get_build_from_nsvc(

-                     session, name, stream, version, context)]

-             elif not version and not context:

+             if not version and not context:

                  if (stream_version_lte and len(str(models.ModuleBuild.get_stream_version(

                          stream, right_pad=False))) >= 5):

                      stream_version = models.ModuleBuild.get_stream_version(stream)

@@ -306,3 +316,8 @@ 

                  }

  

          return new_requires

+ 

+     def get_modulemd_by_koji_tag(self, tag):

+         with models.make_session(self.config) as session:

+             module = models.ModuleBuild.get_build_by_koji_tag(session, tag)

+             return module.mmd() if module else None

@@ -394,3 +394,12 @@ 

                  import_mmd(db.session, mmd)

  

          return new_requires

+ 

+     def get_modulemd_by_koji_tag(self, tag):

+         resp = self.session.get(self.mbs_prod_url, params={'koji_tag': tag, 'verbose': True})

+         data = resp.json()

+         if data['items']:

+             modulemd = data['items'][0]['modulemd']

+             return self.extract_modulemd(modulemd)

+         else:

+             return None
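A minimal sketch of the MBS API response shape this code assumes; the payload below is a placeholder, not real data:

    # Hypothetical, heavily truncated JSON body returned by the MBS query:
    data = {'items': [{'modulemd': '<modulemd YAML document>'}]}
    # data['items'][0]['modulemd'] is passed to extract_modulemd(); an empty
    # 'items' list means no module build has that koji_tag, so None is returned.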

@@ -37,7 +37,14 @@ 

      """

  

      _resolvers = cfg.SUPPORTED_RESOLVERS

+ 

+     # Resolver name. Each subclass of GenericResolver must set its own name.

      backend = "generic"

+ 

+     # Registry of supported resolver backends. A resolver backend is

+     # registered by calling :meth:`GenericResolver.register_backend_class`.

+     # This is a mapping from resolver name to backend class object,

+     # for example, {'mbs': MBSResolver}.

      backends = {}

  

      @classmethod

@@ -46,13 +53,19 @@ 

  

      @classmethod

      def create(cls, config, backend=None, **extra):

+         """Factory method to create a resolver object

+ 

+         :param config: MBS config object.

+         :type config: :class:`Config`

+         :kwarg str backend: resolver backend name, e.g. 'db'. If omitted,

+             system configuration ``resolver`` is used.

+         :kwarg extra: any additional arguments are optional extras which can

+             be passed along and are implementation-dependent.

+         :return: resolver backend object.

+         :rtype: corresponding registered resolver class.

+         :raises ValueError: if specified resolver backend name is not

+             registered.

          """

-         :param backend: a string representing resolver e.g. 'db'

- 

-         Any additional arguments are optional extras which can be passed along

-         and are implementation-dependent.

-         """

- 

          # get the appropriate resolver backend via configuration

          if not backend:

              backend = conf.resolver
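A short usage sketch, assuming the 'db' and 'mbs' backends shown elsewhere in this PR are registered:

    from module_build_service import conf
    from module_build_service.resolver import GenericResolver

    # Uses conf.resolver when no backend name is given.
    resolver = GenericResolver.create(conf)

    # Explicitly pick a registered backend; an unknown name raises ValueError.
    mbs_resolver = GenericResolver.create(conf, backend='mbs')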

@@ -115,3 +128,13 @@ 

      @abstractmethod

      def resolve_requires(self, requires):

          raise NotImplementedError()

+ 

+     @abstractmethod

+     def get_modulemd_by_koji_tag(self, tag):

+         """Get module metadata by module's koji_tag

+ 

+         :param str tag: name of module's koji_tag.

+         :return: module metadata

+         :rtype: Modulemd.Module

+         """

+         raise NotImplementedError()

@@ -157,6 +157,7 @@ 

          build.transition(conf, models.BUILD_STATES["wait"])

      # Catch custom exceptions that we can expose to the user

      except (UnprocessableEntity, Forbidden, ValidationError, RuntimeError) as e:

+         log.exception(str(e))

          # Rollback changes underway

          session.rollback()

          build.transition(conf, models.BUILD_STATES["failed"], state_reason=str(e))

@@ -27,3 +27,4 @@ 

  from module_build_service.utils.reuse import *  # noqa

  from module_build_service.utils.submit import *  # noqa

  from module_build_service.utils.batches import *  # noqa

+ from module_build_service.utils.ursine import *  # noqa

@@ -42,47 +42,30 @@ 

      ValidationError, UnprocessableEntity, Forbidden, Conflict)

  from module_build_service import glib

  from .mse import generate_expanded_mmds

+ from module_build_service.utils.ursine import record_stream_collision_modules

  

  

  def record_filtered_rpms(mmd):

-     """

-     Reads the mmd["xmd"]["buildrequires"] and extends it with "filtered_rpms"

-     list containing the NVRs of filtered RPMs in a buildrequired module.

+     """Record filtered RPMs that should not be installed into the buildroot

+ 

+     Two kinds of RPMs are recorded:

+ 

+     * The mmd["xmd"]["buildrequires"] section is extended with a "filtered_rpms"

+       list containing the NVRs of filtered RPMs in each buildrequired module.

+ 

+     * Every base module could have stream collision modules, whose built RPMs

+       should be filtered out to avoid collision. The filtered-out RPMs are

+       stored in the base module's metadata as well.

  

-     :param Modulemd mmd: Modulemd of input module.

-     :rtype: Modulemd

+     :param Modulemd mmd: Modulemd that will be built next.

+     :rtype: Modulemd.Module

      :return: Modulemd extended with the "filtered_rpms" in XMD section.

      """

-     # Imported here to allow import of utils in GenericBuilder.

-     from module_build_service.builder import GenericBuilder

- 

      new_buildrequires = {}

-     resolver = module_build_service.resolver.system_resolver

      for req_name, req_data in mmd.get_xmd()["mbs"]["buildrequires"].items():

-         # In case this is module resubmit or local build, the filtered_rpms

-         # will already be there, so there is no point in generating them again.

-         if "filtered_rpms" in req_data:

-             new_buildrequires[req_name] = req_data

-             continue

- 

-         # We can just get the first modulemd data from result right here thanks to

-         # strict=True, so in case the module cannot be found, get_module_modulemds

-         # raises an exception.

-         req_mmd = resolver.get_module_modulemds(

-             req_name, req_data["stream"], req_data["version"], req_data["context"], True)[0]

- 

-         # Find out the particular NVR of filtered packages

-         filtered_rpms = []

-         rpm_filter = req_mmd.get_rpm_filter()

-         if rpm_filter and rpm_filter.get():

-             rpm_filter = rpm_filter.get()

-             built_nvrs = GenericBuilder.backends[conf.system].get_built_rpms_in_module_build(

-                 req_mmd)

-             for nvr in built_nvrs:

-                 parsed_nvr = kobo.rpmlib.parse_nvr(nvr)

-                 if parsed_nvr["name"] in rpm_filter:

-                     filtered_rpms.append(nvr)

-         req_data["filtered_rpms"] = filtered_rpms

+         _record_filtered_rpms(req_name, req_data)

+         if req_name in conf.base_module_names and 'stream_collision_modules' in req_data:

+             _record_ursine_rpms(req_data)

          new_buildrequires[req_name] = req_data

  

      # Replace the old buildrequires with new ones.

@@ -92,6 +75,91 @@ 

      return mmd

  

  

+ def _record_filtered_rpms(req_name, req_data):

+     """Store the filtered RPMs of a buildrequired module

+ 

+     This method looks for the key ``filtered_rpms`` in a buildrequired module's

+     metadata. If it is already present, it is kept unchanged, which is the case

+     when a module is resubmitted or built locally.

+ 

+     Otherwise, ``_record_filtered_rpms`` collects the module's RPMs listed in the

+     filter section and stores them under that key in the metadata in place.

+ 

+     :param str req_name: name of a buildrequired module.

+     :param dict req_data: a mapping containing metadata of the buildrequired

+         module. A pair of ``req_name`` and ``req_data`` is usually an item of

+         xmd.mbs.buildrequires, which is stored during the module stream

+         expansion process. At a minimum, ``req_data`` must have the keys stream,

+         version and context. The key ``filtered_rpms`` will be added as a list

+         of RPM N-E:V-Rs.

+     """

+     # Imported here to allow import of utils in GenericBuilder.

+     from module_build_service.builder import GenericBuilder

+ 

+     # In case this is module resubmit or local build, the filtered_rpms

+     # will already be there, so there is no point in generating them again.

+     if "filtered_rpms" in req_data:

+         return

+ 

+     resolver = module_build_service.resolver.GenericResolver.create(conf)

+ 

+     # We can just get the first modulemd data from result right here thanks to

+     # strict=True, so in case the module cannot be found, get_module_modulemds

+     # raises an exception.

+     req_mmd = resolver.get_module_modulemds(

+         req_name, req_data["stream"], req_data["version"], req_data["context"], True)[0]

+ 

+     # Find out the particular NVR of filtered packages

+     filtered_rpms = []

+     rpm_filter = req_mmd.get_rpm_filter()

+     if rpm_filter and rpm_filter.get():

+         rpm_filter = rpm_filter.get()

+         built_nvrs = GenericBuilder.backends[conf.system].get_built_rpms_in_module_build(

+             req_mmd)

+         for nvr in built_nvrs:

+             parsed_nvr = kobo.rpmlib.parse_nvr(nvr)

+             if parsed_nvr["name"] in rpm_filter:

+                 filtered_rpms.append(nvr)

+     req_data["filtered_rpms"] = filtered_rpms

+ 

+ 

+ def _record_ursine_rpms(req_data):

+     """Store ursine RPMs

+ 

+     This method reads the key ``stream_collision_modules`` from a buildrequired

+     module's metadata to find all built RPMs and then stores them under a new

+     key ``ursine_rpms`` in the metadata in place.

+ 

+     :param dict req_data: a mapping containing metadata of the buildrequired

+         module. At a minimum, ``req_data`` must have the keys stream, version

+         and context. As a result, a new key ``ursine_rpms`` will be added with

+         a list of RPM N-E:V-Rs which were built for the found stream collision

+         modules.

+     """

+     from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

+     resolver = module_build_service.resolver.GenericResolver.create(conf)

+ 

+     # The key stream_collision_modules is not used after RPMs are recorded,

+     # but it is kept here in case it is helpful in the future.

+     modules_nsvc = req_data['stream_collision_modules']

+     built_rpms = []

+ 

+     koji_session = KojiModuleBuilder.get_session(conf, None)

+ 

+     for nsvc in modules_nsvc:

+         name, stream, version, context = nsvc.split(':')

+         module = resolver._get_module(name, stream, version, context, strict=True)

+         rpms = koji_session.listTaggedRPMS(module['koji_tag'], latest=True)[0]

+         built_rpms.extend(

+             kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms

+         )

+ 

+     # In case there are duplicate NEVRs, ensure every NEVR is unique in the final list.

+     # A sorted list of RPMs is also easier to read.

+     built_rpms = sorted(set(built_rpms))

+     req_data['ursine_rpms'] = built_rpms

+ 

+ 

  def _scm_get_latest(pkg):

      try:

          # If the modulemd specifies that the 'f25' branch is what
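To make the data flow concrete, here is a sketch of a base module's entry in xmd/mbs/buildrequires before and after these helpers run; the values are loosely based on the test data added further below and are otherwise hypothetical:

    # Entry for a base module (e.g. platform) after module stream expansion and
    # record_stream_collision_modules():
    req_data = {
        'stream': 'master',
        'version': '20180101000000',
        'context': '00000000',
        'koji_tag': 'module-rhel-8.0-build',
        'stream_collision_modules': ['modulefoo:s:v:c'],
    }

    # After _record_filtered_rpms() and _record_ursine_rpms() the same entry
    # additionally carries a sorted, de-duplicated list of NEVRs:
    #   'filtered_rpms': []
    #   'ursine_rpms': ['bar-0:2.0-1.fc28', 'foo-0:1.0-1.fc28']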

@@ -367,11 +435,6 @@ 

                                    optional_params=None):

      yaml_file = handle.read()

      mmd = load_mmd(yaml_file)

- 

-     # Mimic the way how default values are generated for modules that are stored in SCM

-     # We can take filename as the module name as opposed to repo name,

-     # and also we can take numeric representation of current datetime

-     # as opposed to datetime of the last commit

      dt = datetime.utcfromtimestamp(int(time.time()))

      def_name = str(os.path.splitext(os.path.basename(handle.filename))[0])

      def_version = int(dt.strftime("%Y%m%d%H%M%S"))

@@ -524,6 +587,9 @@ 

              module.transition(conf, transition_to, "Resubmitted by %s" % username)

              log.info("Resumed existing module build in previous state %s" % module.state)

          else:

+             log.info('Start to handle potential module stream collision.')

+             record_stream_collision_modules(mmd)

+ 

              log.debug('Creating new module build')

              module = models.ModuleBuild.create(

                  db.session,

@@ -0,0 +1,228 @@ 

+ # -*- coding: utf-8 -*-

+ # Copyright (c) 2018  Red Hat, Inc.

+ #

+ # Permission is hereby granted, free of charge, to any person obtaining a copy

+ # of this software and associated documentation files (the "Software"), to deal

+ # in the Software without restriction, including without limitation the rights

+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell

+ # copies of the Software, and to permit persons to whom the Software is

+ # furnished to do so, subject to the following conditions:

+ #

+ # The above copyright notice and this permission notice shall be included in all

+ # copies or substantial portions of the Software.

+ #

+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR

+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,

+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE

+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER

+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,

+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE

+ # SOFTWARE.

+ #

+ # Written by Chenxiong Qi <cqi@redhat.com>

+ 

+ import re

+ 

+ from module_build_service import conf, log, glib

+ from module_build_service.resolver import system_resolver

+ 

+ 

+ """

+ This module handles module stream collision with ursine content before a module

+ is built.

+ 

+ This kind of collision happens when packages that are managed by Ursa-Major

+ are used to build a module. For example, module foo buildrequires bar with

+ stream A, but module bar with another stream B was already added to the ursine

+ content by Ursa-Major when it was built and moved to the ready state. Hence,

+ MBS has to ensure that no packages from module bar:B end up in the buildroot.

+ 

+ A technical background:

+ 

+ Generally, each module buildrequires a platform module, which is associated

+ with ursine content by adding an external repository to the platform module's

+ tag. That repository is generated from a build tag that inherits from a set of

+ modules' koji tags through its tag inheritance hierarchy. Ursa-Major manages

+ the inheritance relationship.

+ 

+ Stream collision modules are just those modules added to tag inheritance by

+ Ursa-Major.

+ """

+ 

+ 

+ def find_build_tags_from_external_repos(koji_session, repo_infos):

+     """Find build tags from external repos

+ 

+     An external repo added to a tag could be an arbitrary external repository.

+     Hence, this method does its best to guess the tags from each external

+     repo's URL using a regular expression.

+ 

+     :param repo_infos: list of mappings representing external repos' information.

+     :type repo_infos: list[dict]

+     :return: a list of tag names.

+     :rtype: list[str]

+     """

+     re_external_repo_url = r'^{}/repos/(.+-build)/latest/\$arch/?$'.format(

+         conf.koji_external_repo_url_prefix.rstrip('/'))

+     tag_names = []

+     for info in repo_infos:

+         match = re.match(re_external_repo_url, info['url'])

+         if match:

+             name = match.groups()[0]

+             if koji_session.getTag(name) is None:

+                 log.warning('Ignoring the found tag %s because no tag info was found '

+                             'with this name.', name)

+             else:

+                 tag_names.append(name)

+         else:

+             log.warning('The build tag could not be parsed from external repo '

+                         '%s whose url is %s.',

+                         info['external_repo_name'], info['url'])

+     return tag_names

+ 

+ 

+ def find_module_koji_tags(koji_session, build_tag):

+     """

+     Find module koji tags from parents of build tag through the tag inheritance

+ 

+     MBS supports a few prefixes which are configured in

+     ``conf.koji_tag_prefixes``. Tags with a configured prefix will be

+     considered a module's koji tag.

+ 

+     :param koji_session: instance of Koji client session.

+     :type koji_session: ClientSession

+     :param str build_tag: tag name, which is the build tag inheriting from

+         parent tags where module koji tags are contained.

+     :return: list of module koji tags.

+     :rtype: list[str]

+     """

+     return [

+         data['name'] for data in koji_session.getFullInheritance(build_tag)

+         if any(data['name'].startswith(prefix) for prefix in conf.koji_tag_prefixes)

+     ]

+ 

+ 

+ def get_modulemds_from_ursine_content(tag):

+     """Get all modules metadata which were added to ursine content

+ 

+     Ursine content is the tag inheritance managed by Ursa-Major by adding

+     specific modules' koji_tag.

+ 

+     Background of module build based on ursine content:

+ 

+     Each module build buildrequires a platform module, which is a pseudo-module

+     used to connect to an external repository whose packages will be present

+     in the buildroot. In practice, the external repo is generated from a build

+     tag which could inherit from a few module koji_tags so that those modules'

+     RPMs could be build dependencies for some specific packages.

+ 

+     So, this function finds all module koji_tags from the build tag

+     and returns the corresponding module metadata.

+ 

+     :param str tag: a base module's koji_tag.

+     :return: list of module metadata. An empty list is returned if no ursine

+         module metadata is found.

+     :rtype: list[Modulemd.Module]

+     """

+     from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

+     koji_session = KojiModuleBuilder.get_session(conf, None)

+     repos = koji_session.getExternalRepoList(tag)

+     build_tags = find_build_tags_from_external_repos(koji_session, repos)

+     if not build_tags:

+         log.debug('No external repo containing ursine content is found.')

+         return []

+     modulemds = []

+     for tag in build_tags:

+         koji_tags = find_module_koji_tags(koji_session, tag)

+         for koji_tag in koji_tags:

+             md = system_resolver.get_modulemd_by_koji_tag(koji_tag)

+             if md:

+                 modulemds.append(md)

+             else:

+                 log.warning('No module is found by koji_tag \'%s\'', koji_tag)

+     return modulemds

+ 

+ 

+ def find_stream_collision_modules(buildrequired_modules, koji_tag):

+     """

+     Find buildrequired modules that are part of the ursine content represented

+     by the koji_tag but with a different stream.

+ 

+     :param dict buildrequired_modules: a mapping of buildrequires, which is just

+         the ``xmd/mbs/buildrequires``. This mapping is used to determine if a module

+         found from ursine content is a buildrequire with a different stream.

+     :param str koji_tag: a base module's koji_tag. Modules will be retrieved from

+         ursine content associated with this koji_tag and checked for modules

+         that collide.

+     :return: a list of NSVC of collision modules. If no collision module is

+         found, an empty list is returned.

+     :rtype: list[str]

+     """

+     ursine_modulemds = get_modulemds_from_ursine_content(koji_tag)

+     if not ursine_modulemds:

+         log.debug('No module metadata is found from ursine content.')

+         return []

+ 

+     collision_modules = [

+         item.dup_nsvc()

+         for item in ursine_modulemds

+         # If a module in the ursine content is one of the buildrequires but has a

+         # different stream, that is what we record here; its RPMs will be

+         # excluded from the buildroot by adding them to the module-build-macros

+         # SRPM as Conflicts.

+         if (item.get_name() in buildrequired_modules and

+             item.get_stream() != buildrequired_modules[item.get_name()]['stream'])

+     ]

+ 

+     for item in collision_modules:

+         name, stream, _ = item.split(':', 2)

+         log.info('Buildrequired module %s exists in ursine content with '

+                  'different stream %s, whose RPMs will be excluded.',

+                  name, stream)

+ 

+     return collision_modules

+ 

+ 

+ def record_stream_collision_modules(mmd):

+     """

+     Find out modules from ursine content and record those that are buildrequire

+     module but have different stream.

+ 

+     Note that this depends on the result of module stream expansion.

+ 

+     MBS supports multiple base modules via the option conf.base_module_names. In

+     most cases the base module name is platform, but others can be configured for

+     particular use cases in practice. So, each expanded base module stored in

+     ``xmd/mbs/buildrequires`` will be handled and will have a new

+     key/value pair ``stream_collision_modules: [N-S-V-C, ...]``. This key/value

+     will then be handled by the module event handler.

+ 

+     As a result, a new item is added to xmd/mbs/buildrequires/platform/stream_collision_modules,

+     which is a list of NSVC strings. Each of them is the module added to ursine

+     content by Ursa-Major.

+ 

+     :param mmd: a module's metadata which will be built.

+     :type mmd: Modulemd.Module

+     """

+     unpacked_xmd = glib.from_variant_dict(mmd.get_xmd())

+     buildrequires = unpacked_xmd['mbs']['buildrequires']

+ 

+     for module_name in conf.base_module_names:

+         base_module_info = buildrequires.get(module_name)

+         if base_module_info is None:

+             log.info(

+                 'Base module %s is not a buildrequire of module %s. '

+                 'Skip handling module stream collision for this base module.',

+                 module_name, mmd.get_name())

+             continue

+ 

+         modules_nsvc = find_stream_collision_modules(

+             buildrequires, base_module_info['koji_tag'])

+         if modules_nsvc:

+             base_module_info['stream_collision_modules'] = modules_nsvc

+         else:

+             log.info('No stream collision module is found against base module %s.',

+                      module_name)

+ 

+     mmd.set_xmd(glib.dict_values(unpacked_xmd))
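Tying the new module together, a minimal end-to-end sketch of the intended behaviour; module names, streams and tags are illustrative, and the unit tests added below exercise the same path with mocks:

    # xmd/mbs/buildrequires of the module being built, after stream expansion:
    xmd = {
        'mbs': {
            'buildrequires': {
                'modulea': {'stream': 'master'},
                'platform': {'stream': 'master',
                             'koji_tag': 'module-rhel-8.0-build'},
            }
        }
    }
    # If the ursine content reachable from platform's koji_tag contains
    # modulea:10:20180813041838:5ea3b708 (same name, different stream),
    # record_stream_collision_modules() records that NSVC under
    # xmd/mbs/buildrequires/platform/stream_collision_modules.
    #
    # Later, _record_ursine_rpms() resolves the NSVC to its koji_tag, lists the
    # tagged RPMs and stores them as 'ursine_rpms', which finally become
    # Conflicts lines in the module-build-macros SRPM.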

file modified
+54 -26

@@ -693,18 +693,28 @@ 

              session.add(build)

  

  

- def make_module(nsvc, requires_list, build_requires_list, base_module=None):

+ def make_module(nsvc, requires_list=None, build_requires_list=None, base_module=None,

+                 filtered_rpms=None, xmd=None, store_to_db=True):

      """

      Creates new models.ModuleBuild defined by `nsvc` string with requires

-     and buildrequires set according to `requires_list` and `build_requires_list`.

+     and buildrequires set according to ``requires_list`` and ``build_requires_list``.

  

      :param str nsvc: name:stream:version:context of a module.

      :param list_of_dicts requires_list: List of dictionaries defining the

          requires in the mmd requires field format.

      :param list_of_dicts build_requires_list: List of dictionaries defining the

          build_requires_list in the mmd build_requires_list field format.

-     :rtype: ModuleBuild

-     :return: New Module Build.

+     :param filtered_rpms: list of filtered RPMs which are added to filter

+         section in module metadata.

+     :type filtered_rpms: list[str]

+     :param dict xmd: a mapping representing XMD section in module metadata. A

+         custom xmd could be passed for testing a particular scenario and some

+         default key/value pairs are added if not present.

+     :param bool store_to_db: whether to store created module metadata to the

+         database.

+     :return: New Module Build if set to store module metadata to database,

+         otherwise the module metadata is returned.

+     :rtype: ModuleBuild or Modulemd.Module

      """

      name, stream, version, context = nsvc.split(":")

      mmd = Modulemd.Module()

@@ -719,30 +729,46 @@ 

      licenses.add("GPL")

      mmd.set_module_licenses(licenses)

  

-     if not isinstance(requires_list, list):

-         requires_list = [requires_list]

-     if not isinstance(build_requires_list, list):

-         build_requires_list = [build_requires_list]

- 

-     xmd = {

-         "mbs": {

-             "buildrequires": {},

-             "requires": {},

-             "commit": "ref_%s" % context,

-             "mse": "true",

-         }

-     }

-     deps_list = []

-     for requires, build_requires in zip(requires_list, build_requires_list):

-         deps = Modulemd.Dependencies()

-         for req_name, req_streams in requires.items():

-             deps.add_requires(req_name, req_streams)

-         for req_name, req_streams in build_requires.items():

-             deps.add_buildrequires(req_name, req_streams)

-         deps_list.append(deps)

-     mmd.set_dependencies(deps_list)

+     if filtered_rpms:

+         rpm_filter_set = Modulemd.SimpleSet()

+         rpm_filter_set.set(filtered_rpms)

+         mmd.set_rpm_filter(rpm_filter_set)

+ 

+     if requires_list is not None and build_requires_list is not None:

+         if not isinstance(requires_list, list):

+             requires_list = [requires_list]

+         if not isinstance(build_requires_list, list):

+             build_requires_list = [build_requires_list]

+ 

+         deps_list = []

+         for requires, build_requires in zip(requires_list,

+                                             build_requires_list):

+             deps = Modulemd.Dependencies()

+             for req_name, req_streams in requires.items():

+                 deps.add_requires(req_name, req_streams)

+             for req_name, req_streams in build_requires.items():

+                 deps.add_buildrequires(req_name, req_streams)

+             deps_list.append(deps)

+         mmd.set_dependencies(deps_list)

+ 

+     # Caller could pass whole xmd including mbs, but if something is missing,

+     # default values are given here.

+     xmd = xmd or {'mbs': {}}

+     xmd_mbs = xmd['mbs']

+     if 'buildrequires' not in xmd_mbs:

+         xmd_mbs['buildrequires'] = {}

+     if 'requires' not in xmd_mbs:

+         xmd_mbs['requires'] = {}

+     if 'commit' not in xmd_mbs:

+         xmd_mbs['commit'] = 'ref_%s' % context

+     if 'mse' not in xmd_mbs:

+         xmd_mbs['mse'] = 'true'

+ 

      mmd.set_xmd(glib.dict_values(xmd))

  

+     if not store_to_db:

+         return mmd

+ 

      module_build = ModuleBuild()

      module_build.name = name

      module_build.stream = stream

@@ -762,6 +788,8 @@ 

      module_build.modulemd = mmd.dumps()

      if base_module:

          module_build.buildrequires.append(base_module)

+     if 'koji_tag' in xmd['mbs']:

+         module_build.koji_tag = xmd['mbs']['koji_tag']

      db.session.add(module_build)

      db.session.commit()

  

file modified
+25 -19

@@ -292,6 +292,7 @@ 

      import moksha.hub.reactor # noqa

  

  

+ @patch('module_build_service.utils.submit.record_stream_collision_modules')

  @patch.object(module_build_service.config.Config, 'system', new_callable=PropertyMock,

                return_value='test')

  @patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",

@@ -326,7 +327,7 @@ 

      @pytest.mark.parametrize('mmd_version', [1, 2])

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

-     def test_submit_build(self, mocked_scm, mocked_get_user, conf_system, dbg,

+     def test_submit_build(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc,

                            mmd_version):

          """

          Tests the build of testmodule.yaml using FakeModuleBuilder which

@@ -392,7 +393,7 @@ 

  

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

-     def test_submit_build_no_components(self, mocked_scm, mocked_get_user, conf_system, dbg):

+     def test_submit_build_no_components(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):

          """

          Tests the build of a module with no components

          """

@@ -420,7 +421,7 @@ 

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

      def test_submit_build_eol_module(self, mocked_scm, mocked_get_user, is_eol, check,

-                                      conf_system, dbg):

+                                      conf_system, dbg, hmsc):

          """ Tests the build of a module with an eol stream.  """

          FakeSCM(mocked_scm, 'python3', 'python3-no-components.yaml',

                  '620ec77321b2ea7b0d67d82992dda3e1d67055b4')

@@ -436,7 +437,7 @@ 

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

      def test_submit_build_from_yaml_not_allowed(

-             self, mocked_scm, mocked_get_user, conf_system, dbg):

+             self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):

          FakeSCM(mocked_scm, "testmodule", "testmodule.yaml")

  

          testmodule = os.path.join(base_dir, 'staged_data', 'testmodule.yaml')

@@ -454,7 +455,8 @@ 

  

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

-     def test_submit_build_from_yaml_allowed(self, mocked_scm, mocked_get_user, conf_system, dbg):

+     def test_submit_build_from_yaml_allowed(

+             self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):

          FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml',

                  '620ec77321b2ea7b0d67d82992dda3e1d67055b4')

          testmodule = os.path.join(base_dir, 'staged_data', 'testmodule.yaml')

@@ -475,7 +477,7 @@ 

  

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

-     def test_submit_build_cancel(self, mocked_scm, mocked_get_user, conf_system, dbg):

+     def test_submit_build_cancel(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):

          """

          Submit all builds for a module and cancel the module build later.

          """

@@ -526,7 +528,8 @@ 

  

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

-     def test_submit_build_instant_complete(self, mocked_scm, mocked_get_user, conf_system, dbg):

+     def test_submit_build_instant_complete(

+             self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):

          """

          Tests the build of testmodule.yaml using FakeModuleBuilder which

          succeeds everytime.

@@ -559,7 +562,7 @@ 

             new_callable=PropertyMock, return_value=1)

      def test_submit_build_concurrent_threshold(self, conf_num_concurrent_builds,

                                                 mocked_scm, mocked_get_user,

-                                                conf_system, dbg):

+                                                conf_system, dbg, hmsc):

          """

          Tests the build of testmodule.yaml using FakeModuleBuilder with

          num_concurrent_builds set to 1.

@@ -603,7 +606,7 @@ 

             new_callable=PropertyMock, return_value=2)

      def test_try_to_reach_concurrent_threshold(self, conf_num_concurrent_builds,

                                                 mocked_scm, mocked_get_user,

-                                                conf_system, dbg):

+                                                conf_system, dbg, hmsc):

          """

          Tests that we try to submit new component build right after

          the previous one finished without waiting for all

@@ -652,7 +655,7 @@ 

      @patch("module_build_service.config.Config.num_concurrent_builds",

             new_callable=PropertyMock, return_value=1)

      def test_build_in_batch_fails(self, conf_num_concurrent_builds, mocked_scm,

-                                   mocked_get_user, conf_system, dbg):

+                                   mocked_get_user, conf_system, dbg, hmsc):

          """

          Tests that if the build in batch fails, other components in a batch

          are still build, but next batch is not started.

@@ -711,7 +714,7 @@ 

      @patch("module_build_service.config.Config.num_concurrent_builds",

             new_callable=PropertyMock, return_value=1)

      def test_all_builds_in_batch_fail(self, conf_num_concurrent_builds, mocked_scm,

-                                       mocked_get_user, conf_system, dbg):

+                                       mocked_get_user, conf_system, dbg, hmsc):

          """

          Tests that if the build in batch fails, other components in a batch

          are still build, but next batch is not started.

@@ -755,7 +758,7 @@ 

  

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

-     def test_submit_build_reuse_all(self, mocked_scm, mocked_get_user, conf_system, dbg):

+     def test_submit_build_reuse_all(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):

          """

          Tests that we do not try building module-build-macros when reusing all

          components in a module build.

@@ -806,7 +809,7 @@ 

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

      def test_submit_build_reuse_all_without_build_macros(self, mocked_scm, mocked_get_user,

-                                                          conf_system, dbg):

+                                                          conf_system, dbg, hmsc):

          """

          Tests that we can reuse components even when the reused module does

          not have module-build-macros component.

@@ -858,7 +861,7 @@ 

  

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

-     def test_submit_build_resume(self, mocked_scm, mocked_get_user, conf_system, dbg):

+     def test_submit_build_resume(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):

          """

          Tests that resuming the build works even when previous batches

          are already built.

@@ -982,7 +985,7 @@ 

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

      def test_submit_build_resume_recover_orphaned_macros(

-             self, mocked_scm, mocked_get_user, conf_system, dbg):

+             self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):

          """

          Tests that resuming the build works when module-build-macros is orphaned but marked as

          failed in the database

@@ -1097,7 +1100,8 @@ 

  

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

-     def test_submit_build_resume_failed_init(self, mocked_scm, mocked_get_user, conf_system, dbg):

+     def test_submit_build_resume_failed_init(

+             self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):

          """

          Tests that resuming the build works when the build failed during the init step

          """

@@ -1151,7 +1155,8 @@ 

  

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

-     def test_submit_build_resume_init_fail(self, mocked_scm, mocked_get_user, conf_system, dbg):

+     def test_submit_build_resume_init_fail(

+             self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):

          """

          Tests that resuming the build fails when the build is in init state

          """

@@ -1181,7 +1186,7 @@ 

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

      def test_submit_build_repo_regen_not_started_batch(self, mocked_scm, mocked_get_user,

-                                                        conf_system, dbg):

+                                                        conf_system, dbg, hmsc):

          """

          Tests that if MBS starts a new batch, the concurrent component threshold is met before a

          build can start, and an unexpected repo regen occurs, the build will not fail.

@@ -1254,13 +1259,14 @@ 

              except Exception:

                  pass

  

+     @patch('module_build_service.utils.submit.record_stream_collision_modules')

      @patch('module_build_service.auth.get_user', return_value=user)

      @patch('module_build_service.scm.SCM')

      @patch("module_build_service.config.Config.mock_resultsdir",

             new_callable=PropertyMock,

             return_value=path.join(base_dir, 'staged_data', "local_builds"))

      def test_submit_build_local_dependency(

-             self, resultsdir, mocked_scm, mocked_get_user, conf_system):

+             self, resultsdir, mocked_scm, mocked_get_user, conf_system, hmsc):

          """

          Tests local module build dependency.

          """

@@ -19,9 +19,13 @@ 

  # SOFTWARE.

  #

  # Written by Jan Kaluza <jkaluza@redhat.com>

+ import os

+ import shutil

+ import tempfile

  

  import mock

  import koji

+ 

  try:

      import xmlrpclib

  except ImportError:

@@ -37,7 +41,7 @@ 

  import pytest

  from mock import patch, MagicMock

  

- from tests import conf, init_data, reuse_component_init_data

+ from tests import conf, init_data, reuse_component_init_data, clean_database

  

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  

@@ -634,3 +638,82 @@ 

          current_module = module_build_service.models.ModuleBuild.query.get(3)

          rv = KojiModuleBuilder._get_filtered_rpms_on_self_dep(current_module, br_filtered_rpms)

          assert set(rv) == set(expected)

+ 

+ 

+ class TestGetDistTagSRPM:

+     """Test KojiModuleBuilder.get_disttag_srpm"""

+ 

+     def setup_method(self):

+         clean_database()

+ 

+         self.tmp_srpm_build_dir = tempfile.mkdtemp(prefix='test-koji-builder-')

+         self.spec_file = os.path.join(self.tmp_srpm_build_dir, 'module-build-macros.spec')

+         self.srpms_dir = os.path.join(self.tmp_srpm_build_dir, 'SRPMS')

+         os.mkdir(self.srpms_dir)

+         self.expected_srpm_file = os.path.join(

+             self.srpms_dir, 'module-build-macros.src.rpm')

+ 

+         # Don't care about the content, just assert the existence.

+         with open(self.expected_srpm_file, 'w') as f:

+             f.write('')

+ 

+         module_nsvc = dict(

+             name='testmodule',

+             stream='master',

+             version='1',

+             context=module_build_service.models.DEFAULT_MODULE_CONTEXT

+         )

+ 

+         xmd = {

+             'mbs': {

+                 'buildrequires': {

+                     'modulea': {

+                         'filtered_rpms': ['baz-devel-0:0.1-6.fc28',

+                                           'baz-doc-0:0.1-6.fc28'],

+                     },

+                     'platform': {

+                         'filtered_rpms': [],

+                         'stream_collision_modules': ['modulefoo-s-v-c'],

+                         'ursine_rpms': ['foo-0:1.0-1.fc28', 'bar-0:2.0-1.fc28']

+                     }

+                 },

+                 'koji_tag': 'module-{name}-{stream}-{version}-{context}'

+                 .format(**module_nsvc)

+             }

+         }

+         from tests import make_module

+         self.module_build = make_module(

+             '{name}:{stream}:{version}:{context}'.format(**module_nsvc),

+             xmd=xmd)

+ 

+     def teardown_method(self):

+         shutil.rmtree(self.tmp_srpm_build_dir)

+         clean_database()

+ 

+     @patch('tempfile.mkdtemp')

+     @patch('module_build_service.builder.KojiModuleBuilder.execute_cmd')

+     def _build_srpm(self, execute_cmd, mkdtemp):

+         mkdtemp.return_value = self.tmp_srpm_build_dir

+         return KojiModuleBuilder.get_disttag_srpm('disttag', self.module_build)

+ 

+     def test_return_srpm_file(self):

+         srpm_file = self._build_srpm()

+         assert self.expected_srpm_file == srpm_file

+ 

+     def test_filtered_rpms_are_added(self):

+         self._build_srpm()

+ 

+         with open(self.spec_file, 'r') as f:

+             content = f.read()

+         for nevr in ['baz-devel-0:0.1-6.fc28', 'baz-doc-0:0.1-6.fc28']:

+             assert KojiModuleBuilder.format_conflicts_line(nevr) + '\n' in content

+ 

+     def test_ursine_rpms_are_added(self):

+         self._build_srpm()

+ 

+         with open(self.spec_file, 'r') as f:

+             content = f.read()

+ 

+         assert '# modulefoo-s-v-c\n' in content

+         for nevr in ['foo-0:1.0-1.fc28', 'bar-0:2.0-1.fc28']:

+             assert KojiModuleBuilder.format_conflicts_line(nevr) + '\n' in content

@@ -0,0 +1,324 @@ 

+ # Copyright (c) 2017  Red Hat, Inc.

+ #

+ # Permission is hereby granted, free of charge, to any person obtaining a copy

+ # of this software and associated documentation files (the "Software"), to deal

+ # in the Software without restriction, including without limitation the rights

+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell

+ # copies of the Software, and to permit persons to whom the Software is

+ # furnished to do so, subject to the following conditions:

+ #

+ # The above copyright notice and this permission notice shall be included in all

+ # copies or substantial portions of the Software.

+ #

+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR

+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,

+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE

+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER

+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,

+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE

+ # SOFTWARE.

+ 

+ from mock import patch, Mock

+ 

+ from module_build_service import conf, glib

+ from module_build_service.utils import ursine

+ from tests import make_module, clean_database

+ 

+ 

+ class TestFindModuleKojiTags:

+     """Test ursine.find_module_koji_tags"""

+ 

+     @patch.object(conf, 'koji_tag_prefixes', new=['module'])

+     def test_find_out_all_module_koji_tags(self):

+         session = Mock()

+         session.getFullInheritance.return_value = [

+             {'name': 'module-tag1-s-v-c'},

+             {'name': 'module-tag2-s-v-c'},

+             {'name': 'tag-1'},

+         ]

+ 

+         expected_tags = ['module-tag1-s-v-c', 'module-tag2-s-v-c']

+ 

+         tags = ursine.find_module_koji_tags(session, 'tag-a-build')

+         assert expected_tags == tags

+ 

+     @patch.object(conf, 'koji_tag_prefixes', new=['module'])

+     def test_return_empty_if_no_module_koji_tags(self):

+         session = Mock()

+         session.getFullInheritance.return_value = [

+             {'name': 'tag-1'}, {'name': 'tag-2'},

+         ]

+ 

+         tags = ursine.find_module_koji_tags(session, 'tag-a-build')

+         assert [] == tags

+ 

+ 

+ class TestFindUrsineRootTags:

+     """Test ursine.find_build_tags_from_external_repos"""

+ 

+     def setup_method(self):

+         self.koji_session = Mock()

+         self.koji_session.getTag.side_effect = lambda name: \

+             None if name == 'X-build' else {'name': name}

+ 

+     def test_find_build_tags(self):

+         with patch.object(conf, 'koji_external_repo_url_prefix',

+                           new='http://example.com/brewroot/'):

+             tags = ursine.find_build_tags_from_external_repos(self.koji_session, [

+                 {

+                     'external_repo_name': 'tag-1-external-repo',

+                     'url': 'http://example.com/brewroot/repos/tag-1-build/latest/$arch/'

+                 },

+                 {

+                     'external_repo_name': 'tag-2-external-repo',

+                     'url': 'http://example.com/brewroot/repos/tag-2-build/latest/$arch/'

+                 },

+             ])

+ 

+             assert ['tag-1-build', 'tag-2-build'] == tags

+ 

+     def test_return_emtpy_if_no_match_external_repo_url(self):

+         with patch.object(conf, 'koji_external_repo_url_prefix',

+                           new='http://example.com/brewroot/'):

+             tags = ursine.find_build_tags_from_external_repos(self.koji_session, [

+                 {

+                     'external_repo_name': 'tag-1-external-repo',

+                     'url': 'https://another-site.org/repos/tag-1-build/latest/$arch/'

+                 },

+                 {

+                     'external_repo_name': 'tag-2-external-repo',

+                     'url': 'https://another-site.org/repos/tag-2-build/latest/$arch/'

+                 },

+             ])

+ 

+             assert [] == tags

+ 

+     def test_some_tag_is_not_koji_tag(self):

+         with patch.object(conf, 'koji_external_repo_url_prefix',

+                           new='http://example.com/brewroot/'):

+             tags = ursine.find_build_tags_from_external_repos(self.koji_session, [

+                 {

+                     'external_repo_name': 'tag-1-external-repo',

+                     'url': 'http://example.com/brewroot/repos/tag-1-build/latest/$arch/'

+                 },

+                 {

+                     'external_repo_name': 'tag-2-external-repo',

+                     'url': 'http://example.com/brewroot/repos/X-build/latest/$arch/'

+                 },

+             ])

+ 

+             assert ['tag-1-build'] == tags

+ 

+ 

+ class TestGetModulemdsFromUrsineContent:

+     """Test ursine.get_modulemds_from_ursine_content"""

+ 

+     def setup_method(self):

+         clean_database(False)

+ 

+     def teardown_method(self, test_method):

+         clean_database()

+ 

+     @patch('koji.ClientSession')

+     def test_return_empty_if_no_ursine_build_tag_is_found(self, ClientSession):

+         session = ClientSession.return_value

+ 

+         # No module koji_tag in ursine content yet. This will result in an

+         # empty list of ursine modulemds being returned.

+         session.getFullInheritance.return_value = [

+             {'name': 'tag-1.0-build'},

+         ]

+         session.getExternalRepoList.return_value = [

+             {

+                 'external_repo_name': 'tag-1.0-external-repo',

+                 'url': 'http://example.com/repos/tag-4-build/latest/$arch/'

+             }

+         ]

+ 

+         modulemds = ursine.get_modulemds_from_ursine_content('tag')

+         assert [] == modulemds

+ 

+     @patch.object(conf, 'koji_tag_prefixes', new=['module'])

+     @patch('koji.ClientSession')

+     def test_get_modulemds(self, ClientSession):

+         session = ClientSession.return_value

+ 

+         # Ensure we get the build tag for the further query of ursine content.

+         # For this test, the build tag is tag-4-build

+         session.getExternalRepoList.return_value = [

+             {

+                 'external_repo_name': 'tag-1.0-external-repo',

+                 'url': 'http://example.com/repos/tag-4-build/latest/$arch/'

+             }

+         ]

+ 

+         # Ensure module tags are returned from the ursine content of the fake

+         # build tag specified in the above external repo's URL.

+         def mock_getFullInheritance(tag):

+             if tag == 'tag-4-build':

+                 return [

+                     {'name': 'tag-1.0-build'},

+                     # The two modules below should be returned, and their

+                     # modulemds should also be queried from the database.

+                     {'name': 'module-name1-s-2020-c'},

+                     {'name': 'module-name2-s-2021-c'},

+                 ]

+             raise ValueError('{} is not handled by test.'.format(tag))

+         session.getFullInheritance.side_effect = mock_getFullInheritance

+ 

+         # Defaults to DB resolver, so create fake module builds and store them

+         # into database to ensure they can be queried.

+         mmd_name1s2020c = make_module(

+             'name1:s:2020:c',

+             xmd={'mbs': {'koji_tag': 'module-name1-s-2020-c'}})

+         mmd_name2s2021c = make_module(

+             'name2:s:2021:c',

+             xmd={'mbs': {'koji_tag': 'module-name2-s-2021-c'}})

+ 

+         koji_tag = 'tag'  # It's ok to use arbitrary tag name.

+         with patch.object(conf, 'koji_external_repo_url_prefix', new='http://example.com/'):

+             modulemds = ursine.get_modulemds_from_ursine_content(koji_tag)

+ 

+         test_nsvcs = [item.dup_nsvc() for item in modulemds]

+         test_nsvcs.sort()

+ 

+         expected_nsvcs = [mmd_name1s2020c.mmd().dup_nsvc(),

+                           mmd_name2s2021c.mmd().dup_nsvc()]

+         expected_nsvcs.sort()

+ 

+         session.getExternalRepoList.assert_called_once_with(koji_tag)

+         assert expected_nsvcs == test_nsvcs

+ 

+ 

+ class TestRecordStreamCollisionModules:

+     """Test ursine.record_stream_collision_modules"""

+ 

+     @patch.object(conf, 'base_module_names', new=['platform'])

+     @patch.object(ursine, 'find_stream_collision_modules')

+     def test_nothing_changed_if_no_base_module_is_in_buildrequires(

+             self, find_stream_collision_modules):

+         xmd = {

+             'mbs': {

+                 'buildrequires': {

+                     'modulea': {'stream': 'master'}

+                 }

+             }

+         }

+         fake_mmd = make_module('name1:s:2020:c', xmd=xmd, store_to_db=False)

+         original_xmd = glib.from_variant_dict(fake_mmd.get_xmd())

+ 

+         with patch.object(ursine, 'log') as log:

+             ursine.record_stream_collision_modules(fake_mmd)

+             log.info.assert_called_once()

+             find_stream_collision_modules.assert_not_called()

+ 

+         assert original_xmd == glib.from_variant_dict(fake_mmd.get_xmd())

+ 

+     @patch.object(conf, 'base_module_names', new=['platform'])

+     @patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content')

+     def test_nothing_changed_if_no_modules_in_ursine_content(

+             self, get_modulemds_from_ursine_content):

+         xmd = {

+             'mbs': {

+                 'buildrequires': {

+                     'modulea': {'stream': 'master'},

+                     'platform': {'stream': 'master', 'koji_tag': 'module-rhel-8.0-build'},

+                 }

+             }

+         }

+         fake_mmd = make_module('name1:s:2020:c', xmd=xmd, store_to_db=False)

+         original_xmd = glib.from_variant_dict(fake_mmd.get_xmd())

+ 

+         get_modulemds_from_ursine_content.return_value = []

+ 

+         with patch.object(ursine, 'log') as log:

+             ursine.record_stream_collision_modules(fake_mmd)

+             log.info.assert_called()

+ 

+         assert original_xmd == glib.from_variant_dict(fake_mmd.get_xmd())

+ 

+     @patch.object(conf, 'base_module_names', new=['platform', 'project-platform'])

+     @patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content')

+     def test_add_collision_modules(self, get_modulemds_from_ursine_content):

+         xmd = {

+             'mbs': {

+                 'buildrequires': {

+                     'modulea': {'stream': 'master'},

+                     'foo': {'stream': '1'},

+                     'bar': {'stream': '2'},

+                     'platform': {'stream': 'master', 'koji_tag': 'module-rhel-8.0-build'},

+                     'project-platform': {

+                         'stream': 'master', 'koji_tag': 'module-project-1.0-build'

+                     },

+                 }

+             }

+         }

+         fake_mmd = make_module('name1:s:2020:c', xmd=xmd, store_to_db=False)

+ 

+         def mock_get_ursine_modulemds(koji_tag):

+             if koji_tag == 'module-rhel-8.0-build':

+                 return [

+                     make_module('modulea:10:20180813041838:5ea3b708', store_to_db=False),

+                     make_module('moduleb:1.0:20180113042038:6ea3b105', store_to_db=False),

+                 ]

+             if koji_tag == 'module-project-1.0-build':

+                 return [

+                     make_module('bar:6:20181013041838:817fa3a8', store_to_db=False),

+                     make_module('foo:2:20180113041838:95f078a1', store_to_db=False),

+                 ]

+ 

+         get_modulemds_from_ursine_content.side_effect = mock_get_ursine_modulemds

+ 

+         ursine.record_stream_collision_modules(fake_mmd)

+ 

+         xmd = glib.from_variant_dict(fake_mmd.get_xmd())

+         buildrequires = xmd['mbs']['buildrequires']

+ 

+         assert (['modulea:10:20180813041838:5ea3b708'] ==

+                 buildrequires['platform']['stream_collision_modules'])

+ 

+         modules = buildrequires['project-platform']['stream_collision_modules']

+         modules.sort()

+         expected_modules = ['bar:6:20181013041838:817fa3a8', 'foo:2:20180113041838:95f078a1']

+         assert expected_modules == modules

+ 

+ 
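The three tests above assume a flow roughly like the sketch below. It is illustrative only; in particular, the conversion of the xmd dict back via glib is shown schematically.

    from module_build_service import conf, glib, log
    from module_build_service.utils import ursine


    def record_stream_collision_modules_sketch(mmd):
        """Sketch: store colliding ursine modules on each buildrequired base module."""
        xmd = glib.from_variant_dict(mmd.get_xmd())
        buildrequires = xmd['mbs']['buildrequires']

        for module_name in conf.base_module_names:
            base_info = buildrequires.get(module_name)
            if base_info is None:
                log.info('Base module %s is not buildrequired. Skipping.', module_name)
                continue

            modules = ursine.find_stream_collision_modules(
                buildrequires, base_info['koji_tag'])
            if not modules:
                log.info('No stream collision module is found for %s.', module_name)
                continue

            # e.g. buildrequires['platform']['stream_collision_modules'] ==
            #      ['modulea:10:20180813041838:5ea3b708']
            base_info['stream_collision_modules'] = modules

        mmd.set_xmd(glib.dict_values(xmd))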

+ class TestFindStreamCollisionModules:

+     """Test ursine.find_stream_collision_modules"""

+ 

+     @patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content')

+     def test_no_modulemds_found_from_ursine_content(

+             self, get_modulemds_from_ursine_content):

+         get_modulemds_from_ursine_content.return_value = []

+         assert not ursine.find_stream_collision_modules({}, 'koji_tag')

+ 

+     @patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content')

+     def test_no_collisions_found(self, get_modulemds_from_ursine_content):

+         xmd_mbs_buildrequires = {

+             'modulea': {'stream': 'master'},

+             'moduleb': {'stream': '10'},

+         }

+         get_modulemds_from_ursine_content.return_value = [

+             make_module('moduler:1:1:c1', store_to_db=False),

+             make_module('modules:2:1:c2', store_to_db=False),

+             make_module('modulet:3:1:c3', store_to_db=False),

+         ]

+         assert [] == ursine.find_stream_collision_modules(

+             xmd_mbs_buildrequires, 'koji_tag')

+ 

+     @patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content')

+     def test_collision_modules_are_found(self, get_modulemds_from_ursine_content):

+         xmd_mbs_buildrequires = {

+             'modulea': {'stream': 'master'},

+             'moduleb': {'stream': '10'},

+         }

+         fake_modules = [

+             make_module('moduler:1:1:c1', store_to_db=False),

+             make_module('moduleb:6:1:c2', store_to_db=False),

+             make_module('modulet:3:1:c3', store_to_db=False),

+         ]

+         get_modulemds_from_ursine_content.return_value = fake_modules

+ 

+         assert [fake_modules[1].dup_nsvc()] == \

+             ursine.find_stream_collision_modules(

+                 xmd_mbs_buildrequires, 'koji_tag')
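Put differently, these tests treat an ursine module as a stream collision when a module with the same name, but a different stream, is already buildrequired. A minimal sketch of that check, using the same Modulemd 1.x accessors the tests use:

    from module_build_service.utils import ursine


    def find_stream_collision_modules_sketch(xmd_mbs_buildrequires, koji_tag):
        """Sketch: return NSVCs of ursine modules whose stream conflicts."""
        ursine_modulemds = ursine.get_modulemds_from_ursine_content(koji_tag)
        if not ursine_modulemds:
            return []

        collision_modules = []
        for mmd in ursine_modulemds:
            buildrequire = xmd_mbs_buildrequires.get(mmd.get_name())
            # moduleb is buildrequired with stream 10, but the ursine content
            # pulls in moduleb:6; that is the collision the last test asserts on.
            if buildrequire and buildrequire['stream'] != mmd.get_stream():
                collision_modules.append(mmd.dup_nsvc())
        return collision_modules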

file modified
+121 -1

@@ -38,7 +38,7 @@ 

  from module_build_service.builder.base import GenericBuilder

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  from module_build_service import glib, Modulemd

- from tests import app

+ from tests import app, make_module

  

  BASE_DIR = path.abspath(path.dirname(__file__))

  

@@ -1093,3 +1093,123 @@ 

              assert len(local_modules) == 1

              assert local_modules[0].koji_tag.endswith(

                  "/module-platform-f28-3/results")

+ 

+ 

+ class TestRecordFilteredRPMs:

+     """Test submit.record_filtered_rpms"""

+ 

+     def setup_method(self):

+         clean_database(add_platform_module=False)

+ 

+         # Prepare fake data for resolver.get_module_modulemds

+         xmd = {

+             'mbs': {

+                 'buildrequires': {

+                     # This module has its own filtered_rpms, which should be unchanged.

+                     'modulea': {'stream': '201808', 'version': '2', 'context': '00000000',

+                                 'filtered_rpms': ['pkg-1.0-1.fc28']},

+ 

+                     # This module has no filtered_rpms yet, so the filtered rpms should be

+                     # injected eventually.

+                     'moduleb': {'stream': '7', 'version': '2', 'context': '00000000'},

+ 

+                     # For testing the recording of rpms included in stream collision modules

+                     'platform': {'stream': 'f28', 'version': '2', 'context': '00000000',

+                                  # Don't care about platform module's filtered rpms here

+                                  'filtered_rpms': [],

+                                  'stream_collision_modules': ['foo:master:1:00000000',

+                                                               'bar:master:2:00000000']},

+                 }

+             }

+         }

+ 

+         self.mmd = make_module('cool-module:201809:1:00000000', xmd=xmd, store_to_db=False)

+ 

+         # Store the buildrequired modules in the database so they can be queried in the test

+         for module_name, md_dict in xmd['mbs']['buildrequires'].items():

+             br_module_nsvc = (module_name, md_dict['stream'],

+                               md_dict['version'], md_dict['context'])

+             br_module_xmd = {

+                 'mbs': {

+                     'koji_tag': 'module-{}-{}-{}-{}'.format(*br_module_nsvc),

+                 }

+             }

+             br_module_filtered_rpms = None

+             if module_name == 'moduleb':

+                 # Add this RPM to the filter section.

+                 # The test will verify that this package is listed in the final filtered_rpms list.

+                 br_module_filtered_rpms = ['foo']

+ 

+             # Modules whose NSVC appears in stream_collision_modules have to be

+             # created in the database. This is for testing the recording of rpms

+             # built for stream collision modules.

+             if module_name == 'platform':

+                 for nsvc in md_dict['stream_collision_modules']:

+                     make_module(nsvc, xmd={'mbs': {

+                         'koji_tag': 'module-{}-{}-{}-{}'.format(*nsvc.split(':'))

+                     }})

+ 

+             make_module('{}:{}:{}:{}'.format(*br_module_nsvc),

+                         filtered_rpms=br_module_filtered_rpms,

+                         xmd=br_module_xmd)

+ 

+     def teardown_method(self):

+         clean_database()

+ 
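A note on the mock used in the test below: koji's listTaggedRPMS call returns a pair of lists, rpms and builds, and the fixture only fills in the first one. The small helper below is purely illustrative of how such a result can be reduced to the N-V-R strings this test class is interested in; the `session` and `koji_tag` parameters are assumptions, not the actual call sites.

    def tagged_nvrs(session, koji_tag):
        """Return N-V-R strings of the latest rpms tagged into koji_tag."""
        # listTaggedRPMS returns (rpms, builds); only the rpm list is needed here.
        rpms = session.listTaggedRPMS(koji_tag, latest=True)[0]
        return ['{name}-{version}-{release}'.format(**rpm) for rpm in rpms]

With the fixtures below, tagged_nvrs(session, 'module-moduleb-7-2-00000000') would evaluate to ['foo-2.0-1.fc28', 'bar-1.0-1.fc28'].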

+     @patch('koji.ClientSession')

+     def test_generate_and_store_filtered_rpms(self, ClientSession):

+ 

+         def mocklistTaggedRPMs(tag, latest):

+             # The result returned from listTaggedRPMs should contain two lists.

+             # The second one is the list of builds, which is not used in this

+             # test and is omitted here.

+             rpms = {

+                 'module-modulea-201808-2-00000000': [

+                     [

+                         {'name': 'pkg1', 'version': '1.0', 'release': '1.fc28'},

+                         {'name': 'pkg2', 'version': '2.34', 'release': '1.fc28'},

+                     ],

+                 ],

+                 'module-moduleb-7-2-00000000': [

+                     [

+                         {'name': 'foo', 'version': '2.0', 'release': '1.fc28'},

+                         {'name': 'bar', 'version': '1.0', 'release': '1.fc28'},

+                     ],

+                 ],

+ 

+                 # These rpms are built for stream collision modules. See above

+                 # in setup_method.

+                 'module-foo-master-1-00000000': [

+                     [

+                         {'name': 'foo-a', 'version': '1.0', 'release': '1.fc28'},