#1331 Prevent overlapping RPMs from buildrequired base modules from being available when using default modules
Merged 4 years ago by mprahl. Opened 4 years ago by mprahl.

file modified
+1 -2
@@ -90,8 +90,6 @@ 

      # Disable Client Authorization

      NO_AUTH = False

  

-     CACHE_DIR = "~/modulebuild/cache"

- 

  

  class TestConfiguration(BaseConfiguration):

      BUILD_LOGS_DIR = "/tmp"
@@ -134,6 +132,7 @@ 

  

  

  class LocalBuildConfiguration(BaseConfiguration):

+     CACHE_DIR = "~/modulebuild/cache"

      LOG_LEVEL = "debug"

      MESSAGING = "in_memory"

  

@@ -29,6 +29,7 @@ 

  import pkg_resources

  import re

  import sys

+ import tempfile

  

  from six import string_types

  
@@ -146,7 +147,11 @@ 

              "desc": "Default dist-tag prefix for built modules.",

          },

          "polling_interval": {"type": int, "default": 0, "desc": "Polling interval, in seconds."},

-         "cache_dir": {"type": Path, "default": "~/modulebuild/cache", "desc": "Cache directory"},

+         "cache_dir": {

+             "type": Path,

+             "default": os.path.join(tempfile.gettempdir(), "mbs"),

+             "desc": "Cache directory"

+         },

          "mbs_url": {

              "type": str,

              "default": "https://mbs.fedoraproject.org/module-build-service/1/module-builds/",

@@ -18,15 +18,23 @@ 

  # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,

  # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE

  # SOFTWARE.

+ import errno

+ import os

+ 

+ import dnf

+ import kobo.rpmlib

  import requests

  

  from module_build_service import conf, log, models

+ from module_build_service.builder.KojiModuleBuilder import (

+     koji_retrying_multicall_map, KojiModuleBuilder,

+ )

  from module_build_service.errors import UnprocessableEntity

  from module_build_service.utils.request_utils import requests_session

  from module_build_service.resolver import system_resolver as resolver

  

  

- def add_default_modules(db_session, mmd):

+ def add_default_modules(db_session, mmd, arches):

      """

      Add default modules as buildrequires to the input modulemd.

  
@@ -36,12 +44,15 @@ 

  

      :param db_session: a SQLAlchemy database session

      :param Modulemd.ModuleStream mmd: the modulemd of the module to add the module defaults to

+     :param list arches: the arches to limit the external repo queries to; this should be the arches

+         the module will be built with

      :raises RuntimeError: if the buildrequired base module isn't in the database or the default

          modules list can't be downloaded

      """

      log.info("Finding the default modules to include as buildrequires")

      xmd = mmd.get_xmd()

      buildrequires = xmd["mbs"]["buildrequires"]

+     defaults_added = False

  

      for module_name in conf.base_module_names:

          bm_info = buildrequires.get(module_name)
@@ -131,5 +142,196 @@ 

              nsvc = ":".join([name, stream, resolved[name]["version"], resolved[name]["context"]])

              log.info("Adding the default module %s as a buildrequire", nsvc)

              buildrequires.update(resolved)

+             defaults_added = True

+ 

+     # For now, we only want to run _handle_collisions if default modules were added, otherwise

+     # still rely on the old approach of running ursine.handle_stream_collision_modules. This is

+     # done in the init handler.

+     if defaults_added:

+         mmd.set_xmd(xmd)

+         # For now, only handle collisions when defaults are used. In the future, this can be enabled

+         # for all module builds when Ursa-Major is no longer supported.

+         _handle_collisions(mmd, arches)

+ 

+ 

+ def _handle_collisions(mmd, arches):

+     """

+     Find any RPMs in the buildrequired base modules that collide with the buildrequired modules.

+ 

+     If a buildrequired module contains RPMs that overlap with RPMs in the buildrequired base

+     modules, then the NEVRAs of the overlapping RPMs in the base modules will be added as conflicts

+     in the input modulemd.

+ 

+     :param Modulemd.ModuleStream mmd: the modulemd to find the collisions

+     :param list arches: the arches to limit the external repo queries to

+     :raise RuntimeError: when a Koji query fails

+     """

+     log.info("Finding any buildrequired modules that collide with the RPMs in the base modules")

+     bm_tags = set()

+     non_bm_tags = set()

+     xmd = mmd.get_xmd()

+     buildrequires = xmd["mbs"]["buildrequires"]

+     for name, info in buildrequires.items():

+         if not info["koji_tag"]:

+             continue

  

+         if name in conf.base_module_names:

+             bm_tags.add(info["koji_tag"])

+         else:

+             non_bm_tags.add(info["koji_tag"])

+ 

+     if not (bm_tags and non_bm_tags):

+         log.info(

+             "Skipping the collision check since collisions are not possible with these "

+             "buildrequires"

+         )

+         return

+ 

+     log.debug(

+         "Querying Koji for the latest RPMs from the buildrequired base modules from the tags: %s",

+         ", ".join(bm_tags),

+     )

+     koji_session = KojiModuleBuilder.get_session(conf, login=False)

+     bm_rpms = _get_rpms_from_tags(koji_session, bm_tags, arches)

+     # The keys are base module RPM names and the values are sets of RPM NEVRAs with that name

+     name_to_nevras = {}

+     for bm_rpm in bm_rpms:

+         rpm_name = kobo.rpmlib.parse_nvra(bm_rpm)["name"]

+         name_to_nevras.setdefault(rpm_name, set())

+         name_to_nevras[rpm_name].add(bm_rpm)

+     # Clear this out of RAM as soon as possible since this value can be huge

+     del bm_rpms

+ 

+     log.debug(

+         "Querying Koji for the latest RPMs from the other buildrequired modules from the tags: %s",

+         ", ".join(non_bm_tags),

+     )

+     # This will contain any NEVRAs of RPMs in the base module tag with the same name as those in the

+     # buildrequired modules

+     conflicts = set()

+     non_bm_rpms = _get_rpms_from_tags(koji_session, non_bm_tags, arches)

+     for rpm in non_bm_rpms:

+         rpm_name = kobo.rpmlib.parse_nvra(rpm)["name"]

+         if rpm_name in name_to_nevras:

+             conflicts = conflicts | name_to_nevras[rpm_name]

+ 

+     # Setting these values will keep ursine.handle_stream_collision_modules from running.

+     # These values are handled in KojiModuleBuilder.get_disttag_srpm.

+     xmd["mbs"]["ursine_rpms"] = list(conflicts)

+     xmd["mbs"]["stream_collision_modules"] = []

      mmd.set_xmd(xmd)

+ 

+ 

+ def _get_rpms_from_tags(koji_session, tags, arches):

+     """

+     Get the RPMs in NEVRA form (tagged or external repos) of the input tags.

+ 

+     :param koji.ClientSession koji_session: the Koji session to use to query

+     :param list tags: the list of tags to get the RPMs from

+     :param list arches: the arches to limit the external repo queries to

+     :return: the set of RPMs in NEVRA form of the input tags

+     :rtype: set

+     :raises RuntimeError: if the Koji query fails

+     """

+     log.debug("Get the latest RPMs from the tags: %s", ", ".join(tags))

+     kwargs = [{"latest": True, "inherit": True}] * len(tags)

+     tagged_results = koji_retrying_multicall_map(

+         koji_session, koji_session.listTaggedRPMS, tags, kwargs,

+     )

+     if not tagged_results:

+         raise RuntimeError(

+             "Getting the tagged RPMs of the following Koji tags failed: {}"

+             .format(", ".join(tags))

+         )

+ 

+     nevras = set()

+     for tagged_result in tagged_results:

+         rpms, _ = tagged_result

+         for rpm_dict in rpms:

+             nevra = kobo.rpmlib.make_nvra(rpm_dict, force_epoch=True)

+             nevras.add(nevra)

+ 

+     repo_results = koji_retrying_multicall_map(koji_session, koji_session.getExternalRepoList, tags)

+     if not repo_results:

+         raise RuntimeError(

+             "Getting the external repos of the following Koji tags failed: {}"

+             .format(", ".join(tags)),

+         )

+     for repos in repo_results:

+         for repo in repos:

+             # Use the repo ID in the cache directory name in case there is more than one external

+             # repo associated with the tag

+             cache_dir_name = "{}-{}".format(repo["tag_name"], repo["external_repo_id"])

+             nevras = nevras | _get_rpms_in_external_repo(repo["url"], arches, cache_dir_name)

+ 

+     return nevras

+ 

+ 

+ def _get_rpms_in_external_repo(repo_url, arches, cache_dir_name):

+     """

+     Get the available RPMs in the external repo for the provided arches.

+ 

+     :param str repo_url: the URL of the external repo with the "$arch" variable included

+     :param list arches: the list of arches to query the external repo for

+     :param str cache_dir_name: the cache directory name under f"{conf.cache_dir}/dnf"

+     :return: a set of the RPM NEVRAs

+     :rtype: set

+     :raise RuntimeError: if the cache is not writeable or the external repo couldn't be loaded

+     :raises ValueError: if there is no "$arch" variable in repo URL

+     """

+     if "$arch" not in repo_url:

+         raise ValueError(

+             "The external repo {} does not contain the $arch variable".format(repo_url)

+         )

+ 

+     base = dnf.Base()

+     dnf_conf = base.conf

+     # Expire the metadata right away so that when a repo is loaded, it will always check to see if

+     # the external repo has been updated

+     dnf_conf.metadata_expire = 0

+ 

+     cache_location = os.path.join(conf.cache_dir, "dnf", cache_dir_name)

+     try:

+         # exist_ok=True can't be used in Python 2

+         os.makedirs(cache_location, mode=0o0770)

+     except OSError as e:

+         # Don't fail if the directories already exist

+         if e.errno != errno.EEXIST:

+             log.exception("Failed to create the cache directory %s", cache_location)

+             raise RuntimeError("The MBS cache is not writeable.")

+ 

+     # Tell DNF to use the cache directory

+     dnf_conf.cachedir = cache_location

+     # Get rid of everything to be sure it's a blank slate. This doesn't delete the cached repo data.

+     base.reset(repos=True, goal=True, sack=True)

+ 

+     # Add a separate repo for each architecture

+     for arch in arches:

+         repo_name = "repo_{}".format(arch)

+         repo_arch_url = repo_url.replace("$arch", arch)

+         base.repos.add_new_repo(repo_name, dnf_conf, baseurl=[repo_arch_url])

+         # Load one repo at a time instead of running `base.update_cache()` so that we know which

+         # repo fails to load if one does

+         try:

+             base.repos[repo_name].load()

+         except dnf.exceptions.RepoError:

+             msg = "Failed to load the external repo {}".format(repo_arch_url)

+             log.exception(msg)

+             raise RuntimeError(msg)

+ 

+     base.fill_sack(load_system_repo=False)

+ 

+     # Return all the available RPMs

+     nevras = set()

+     for rpm in base.sack.query().available():

+         rpm_dict = {

+             "arch": rpm.arch,

+             "epoch": rpm.epoch,

+             "name": rpm.name,

+             "release": rpm.release,

+             "version": rpm.version,

+         }

+         nevra = kobo.rpmlib.make_nvra(rpm_dict, force_epoch=True)

+         nevras.add(nevra)

+ 

+     return nevras

@@ -164,7 +164,8 @@ 

      failure_reason = "unspec"

      try:

          mmd = build.mmd()

-         add_default_modules(session, mmd)

+         arches = [arch.name for arch in build.arches]

+         add_default_modules(session, mmd, arches)

          record_module_build_arches(mmd, build, session)

          record_component_builds(mmd, build, session=session)

          # The ursine.handle_stream_collision_modules is Koji specific.

@@ -28,6 +28,7 @@ 

            stream: f28

            version: '3'

            context: '00000000'

+           koji_tag: 'module-f28-build'

        commit: 65a7721ee4eff44d2a63fb8f3a8da6e944ab7f4d

        requires:

          platform:

@@ -17,21 +17,24 @@ 

  # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,

  # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE

  # SOFTWARE.

- 

+ from collections import namedtuple

+ import errno

  import textwrap

  

- from mock import patch

+ import dnf

+ from mock import call, Mock, patch

  import pytest

  import requests

  

  from module_build_service.models import ModuleBuild

- from module_build_service.scheduler.default_modules import add_default_modules

+ from module_build_service.scheduler import default_modules

  from module_build_service.utils.general import load_mmd, mmd_to_str

  from tests import clean_database, make_module, read_staged_data

  

  

+ @patch("module_build_service.scheduler.default_modules._handle_collisions")

  @patch("module_build_service.scheduler.default_modules.requests_session")

- def test_add_default_modules(mock_requests_session, db_session):

+ def test_add_default_modules(mock_requests_session, mock_hc, db_session):

      """

      Test that default modules present in the database are added, and the others are ignored.

      """
@@ -66,11 +69,12 @@ 

          ruby:2.6

          some invalid stuff

      """)

-     add_default_modules(db_session, mmd)

+     default_modules.add_default_modules(db_session, mmd, ["x86_64"])

      # Make sure that the default modules were added. ruby:2.6 will be ignored since it's not in

      # the database

      assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"nodejs", "platform", "python"}

      mock_requests_session.get.assert_called_once_with(default_modules_url, timeout=10)

+     mock_hc.assert_called_once()

  

  

  @patch("module_build_service.scheduler.default_modules.requests_session")
@@ -81,7 +85,7 @@ 

      clean_database()

      mmd = load_mmd(read_staged_data("formatted_testmodule.yaml"))

      assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"platform"}

-     add_default_modules(db_session, mmd)

+     default_modules.add_default_modules(db_session, mmd, ["x86_64"])

      assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"platform"}

      mock_requests_session.get.assert_not_called()

  
@@ -98,7 +102,7 @@ 

  

      expected_error = "Failed to retrieve the module platform:f28:3:00000000 from the database"

      with pytest.raises(RuntimeError, match=expected_error):

-         add_default_modules(db_session, mmd)

+         default_modules.add_default_modules(db_session, mmd, ["x86_64"])

  

  

  @pytest.mark.parametrize("connection_error", (True, False))
@@ -142,4 +146,249 @@ 

          expected_error = "Failed to retrieve the default modules for platform:f28:3:00000000"

  

      with pytest.raises(RuntimeError, match=expected_error):

-         add_default_modules(db_session, mmd)

+         default_modules.add_default_modules(db_session, mmd, ["x86_64"])

+ 

+ 

+ @patch("module_build_service.scheduler.default_modules.KojiModuleBuilder.get_session")

+ @patch("module_build_service.scheduler.default_modules._get_rpms_from_tags")

+ def test_handle_collisions(mock_grft, mock_get_session):

+     """

+     Test that _handle_collisions will add conflicts for NEVRAs in the modulemd.

+     """

+     mmd = load_mmd(read_staged_data("formatted_testmodule.yaml"))

+     xmd = mmd.get_xmd()

+     xmd["mbs"]["buildrequires"]["platform"]["koji_tag"] = "module-el-build"

+     xmd["mbs"]["buildrequires"]["python"] = {"koji_tag": "module-python27"}

+     xmd["mbs"]["buildrequires"]["bash"] = {"koji_tag": "module-bash"}

+     mmd.set_xmd(xmd)

+ 

+     bm_rpms = {

+         "bash-completion-1:2.7-5.el8.noarch",

+         "bash-0:4.4.19-7.el8.aarch64",

+         "python2-tools-0:2.7.16-11.el8.aarch64",

+         "python2-tools-0:2.7.16-11.el8.x86_64",

+         "python3-ldap-0:3.1.0-4.el8.aarch64",

+         "python3-ldap-0:3.1.0-4.el8.x86_64",

+     }

+     non_bm_rpms = {

+         "bash-0:4.4.20-1.el8.aarch64",

+         "python2-tools-0:2.7.18-1.module+el8.1.0+3568+bbd875cb.aarch64",

+         "python2-tools-0:2.7.18-1.module+el8.1.0+3568+bbd875cb.x86_64",

+     }

+     mock_grft.side_effect = [bm_rpms, non_bm_rpms]

+ 

+     default_modules._handle_collisions(mmd, ["aarch64", "x86_64"])

+ 

+     mock_get_session.assert_called_once()

+     xmd_mbs = mmd.get_xmd()["mbs"]

+     assert set(xmd_mbs["ursine_rpms"]) == set([

+         "bash-0:4.4.19-7.el8.aarch64",

+         "python2-tools-0:2.7.16-11.el8.aarch64",

+         "python2-tools-0:2.7.16-11.el8.x86_64",

+     ])

+     assert mock_grft.mock_calls == [

+         call(

+             mock_get_session.return_value,

+             {"module-el-build"},

+             ["aarch64", "x86_64"],

+         ),

+         call(

+             mock_get_session.return_value,

+             {"module-bash", "module-python27"},

+             ["aarch64", "x86_64"],

+         ),

+     ]

+ 

+ 

+ @patch("module_build_service.scheduler.default_modules.koji_retrying_multicall_map")

+ @patch("module_build_service.scheduler.default_modules._get_rpms_in_external_repo")

+ def test_get_rpms_from_tags(mock_grier, mock_multicall_map):

+     """

+     Test the function queries Koji for the tags' and the tags' external repos' for RPMs.

+     """

+     mock_session = Mock()

+     bash_tagged = [

+         [

+             {

+                 "arch": "aarch64",

+                 "epoch": 0,

+                 "name": "bash",

+                 "version": "4.4.20",

+                 "release": "1.module+el8.1.0+123+bbd875cb",

+             },

+             {

+                 "arch": "x86_64",

+                 "epoch": 0,

+                 "name": "bash",

+                 "version": "4.4.20",

+                 "release": "1.module+el8.1.0+123+bbd875cb",

+             }

+         ],

+         None,

+     ]

+     python_tagged = [

+         [

+             {

+                 "arch": "aarch64",

+                 "epoch": 0,

+                 "name": "python2-tools",

+                 "version": "2.7.18",

+                 "release": "1.module+el8.1.0+3568+bbd875cb",

+             },

+             {

+                 "arch": "x86_64",

+                 "epoch": 0,

+                 "name": "python2-tools",

+                 "version": "2.7.18",

+                 "release": "1.module+el8.1.0+3568+bbd875cb",

+             }

+         ],

+         None,

+     ]

+     bash_repos = []

+     external_repo_url = "http://domain.local/repo/latest/$arch/"

+     python_repos = [{

+         "external_repo_id": "12",

+         "tag_name": "module-python27",

+         "url": external_repo_url,

+     }]

+     mock_multicall_map.side_effect = [

+         [bash_tagged, python_tagged],

+         [bash_repos, python_repos],

+     ]

+     mock_grier.return_value = {

+         "python2-test-0:2.7.16-11.module+el8.1.0+3568+bbd875cb.aarch64",

+         "python2-test-0:2.7.16-11.module+el8.1.0+3568+bbd875cb.x86_64",

+     }

+ 

+     tags = ["module-bash", "module-python27"]

+     arches = ["aarch64", "x86_64"]

+     rv = default_modules._get_rpms_from_tags(mock_session, tags, arches)

+ 

+     expected = {

+         "bash-0:4.4.20-1.module+el8.1.0+123+bbd875cb.aarch64",

+         "bash-0:4.4.20-1.module+el8.1.0+123+bbd875cb.x86_64",

+         "python2-tools-0:2.7.18-1.module+el8.1.0+3568+bbd875cb.aarch64",

+         "python2-tools-0:2.7.18-1.module+el8.1.0+3568+bbd875cb.x86_64",

+         "python2-test-0:2.7.16-11.module+el8.1.0+3568+bbd875cb.aarch64",

+         "python2-test-0:2.7.16-11.module+el8.1.0+3568+bbd875cb.x86_64",

+     }

+     assert rv == expected

+     assert mock_multicall_map.call_count == 2

+     mock_grier.assert_called_once_with(external_repo_url, arches, "module-python27-12")

+ 

+ 

+ @patch("module_build_service.scheduler.default_modules.koji_retrying_multicall_map")

+ def test_get_rpms_from_tags_error_listTaggedRPMS(mock_multicall_map):

+     """

+     Test that an exception is raised if the listTaggedRPMS Koji query fails.

+     """

+     mock_session = Mock()

+     mock_multicall_map.return_value = None

+ 

+     tags = ["module-bash", "module-python27"]

+     arches = ["aarch64", "x86_64"]

+     expected = (

+         "Getting the tagged RPMs of the following Koji tags failed: module-bash, module-python27"

+     )

+     with pytest.raises(RuntimeError, match=expected):

+         default_modules._get_rpms_from_tags(mock_session, tags, arches)

+ 

+ 

+ @patch("module_build_service.scheduler.default_modules.koji_retrying_multicall_map")

+ def test_get_rpms_from_tags_error_getExternalRepoList(mock_multicall_map):

+     """

+     Test that an exception is raised if the getExternalRepoList Koji query fails.

+     """

+     mock_session = Mock()

+     mock_multicall_map.side_effect = [[[[], []]], None]

+ 

+     tags = ["module-bash", "module-python27"]

+     arches = ["aarch64", "x86_64"]

+     expected = (

+         "Getting the external repos of the following Koji tags failed: module-bash, module-python27"

+     )

+     with pytest.raises(RuntimeError, match=expected):

+         default_modules._get_rpms_from_tags(mock_session, tags, arches)

+ 

+ 

+ @patch("dnf.Base")

+ @patch("os.makedirs")

+ def test_get_rpms_in_external_repo(mock_makedirs, mock_dnf_base):

+     """

+     Test that DNF can query the external repos for the available packages.

+     """

+     RPM = namedtuple("RPM", ["arch", "epoch", "name", "release", "version"])

+     mock_dnf_base.return_value.sack.query.return_value.available.return_value = [

+         RPM("aarch64", 0, "python", "1.el8", "2.7"),

+         RPM("aarch64", 0, "python", "1.el8", "3.7"),

+         RPM("x86_64", 0, "python", "1.el8", "2.7"),

+         RPM("x86_64", 0, "python", "1.el8", "3.7"),

+     ]

+ 

+     external_repo_url = "http://domain.local/repo/latest/$arch/"

+     arches = ["aarch64", "x86_64"]

+     cache_dir_name = "module-el-build-12"

+     rv = default_modules._get_rpms_in_external_repo(external_repo_url, arches, cache_dir_name)

+ 

+     expected = {

+         "python-0:2.7-1.el8.aarch64",

+         "python-0:3.7-1.el8.aarch64",

+         "python-0:2.7-1.el8.x86_64",

+         "python-0:3.7-1.el8.x86_64",

+     }

+     assert rv == expected

+ 

+ 

+ def test_get_rpms_in_external_repo_invalid_repo_url():

+     """

+     Test that an exception is raised when an invalid repo URL is passed in.

+     """

+     external_repo_url = "http://domain.local/repo/latest/"

+     arches = ["aarch64", "x86_64"]

+     cache_dir_name = "module-el-build-12"

+     expected = (

+         r"The external repo http://domain.local/repo/latest/ does not contain the \$arch variable"

+     )

+     with pytest.raises(ValueError, match=expected):

+         default_modules._get_rpms_in_external_repo(external_repo_url, arches, cache_dir_name)

+ 

+ 

+ @patch("dnf.Base")

+ @patch("os.makedirs")

+ def test_get_rpms_in_external_repo_failed_to_load(mock_makedirs, mock_dnf_base):

+     """

+     Test that an exception is raised when an external repo can't be loaded.

+     """

+     class FakeRepo(dict):

+         @staticmethod

+         def add_new_repo(*args, **kwargs):

+             pass

+ 

+     mock_repo = Mock()

+     mock_repo.load.side_effect = dnf.exceptions.RepoError("Failed")

+     mock_dnf_base.return_value.repos = FakeRepo(repo_aarch64=mock_repo)

+ 

+     external_repo_url = "http://domain.local/repo/latest/$arch/"

+     arches = ["aarch64", "x86_64"]

+     cache_dir_name = "module-el-build-12"

+     expected = "Failed to load the external repo http://domain.local/repo/latest/aarch64/"

+     with pytest.raises(RuntimeError, match=expected):

+         default_modules._get_rpms_in_external_repo(external_repo_url, arches, cache_dir_name)

+ 

+ 

+ @patch("os.makedirs")

+ def test_get_rpms_in_external_repo_failed_to_create_cache(mock_makedirs):

+     """

+     Test that an exception is raised when the cache can't be created.

+     """

+     exc = OSError()

+     exc.errno = errno.EACCES

+     mock_makedirs.side_effect = exc

+ 

+     external_repo_url = "http://domain.local/repo/latest/$arch/"

+     arches = ["aarch64", "x86_64"]

+     cache_dir_name = "module-el-build-12"

+     expected = "The MBS cache is not writeable."

+     with pytest.raises(RuntimeError, match=expected):

+         default_modules._get_rpms_in_external_repo(external_repo_url, arches, cache_dir_name)

When using default modules, this feature adds conflicts to module-build-macros for every RPM in a buildrequired base module whose name overlaps with an RPM in the other (non-base) buildrequired modules. This prevents the base module's copies from being available in the buildroot and thus ensures that the RPMs from the non-base buildrequired modules are used even if they have a lower NVR.
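
For illustration, here is a condensed sketch of the overlap detection that _handle_collisions performs. In the real code the two NEVRA sets come from Koji via _get_rpms_from_tags; the function name find_base_module_conflicts and the example NEVRAs below are made up:

    import kobo.rpmlib

    def find_base_module_conflicts(base_module_nevras, other_module_nevras):
        # Group the base module NEVRAs by RPM name, mirroring _handle_collisions
        name_to_nevras = {}
        for nevra in base_module_nevras:
            name = kobo.rpmlib.parse_nvra(nevra)["name"]
            name_to_nevras.setdefault(name, set()).add(nevra)

        # Any base module NEVRA whose name also appears in the other
        # buildrequired modules becomes a conflict for module-build-macros
        conflicts = set()
        for nevra in other_module_nevras:
            name = kobo.rpmlib.parse_nvra(nevra)["name"]
            conflicts |= name_to_nevras.get(name, set())
        return conflicts

    # Hypothetical NEVRAs: bash is provided by both platform and a buildrequired module
    base = {"bash-0:4.4.19-7.el8.x86_64", "glibc-0:2.28-42.el8.x86_64"}
    others = {"bash-0:4.4.20-1.module+el8.1.0+123+abc.x86_64"}
    print(find_base_module_conflicts(base, others))
    # -> {'bash-0:4.4.19-7.el8.x86_64'}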

2 new commits added

  • Prevent overlapping RPMs from buildrequired base modules from being available when using default modules
  • Default the cache directory to the "mbs" directory under tempfile.gettempdir()
4 years ago

2 new commits added

  • Prevent overlapping RPMs from buildrequired base modules from being available when using default modules
  • Default the cache directory to the "mbs" directory under tempfile.gettempdir()
4 years ago

2 new commits added

  • Prevent overlapping RPMs from buildrequired base modules from being available when using default modules
  • Default the cache directory to the "mbs" directory under tempfile.gettempdir()
4 years ago

Build #236 failed (commit: 292ce350e0acc13c0dffdfb19eec4136823362a8).
Rebase or make new commits to rebuild.

Build #237 failed (commit: 6552fa21c1e9c5e1007be5542e0be4eb275629e0).
Rebase or make new commits to rebuild.

This mmd.set_xmd(xmd) is not related to this feature directly, right? It should have been done even before this commit to propagate buildrequires.update(resolved) to mmd.

FYI, you can also achieve the same with [{"latest": True, "inherit": True}] * len(tags)
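
A small self-contained illustration of that suggestion (no Koji access needed). The one caveat is that list multiplication repeats the same dict object, which is harmless here because the kwargs are only read, never mutated:

    tags = ["module-bash", "module-python27"]

    # One kwargs dict per tag, as suggested
    kwargs = [{"latest": True, "inherit": True}] * len(tags)

    # Every entry is the *same* dict object; fine for read-only kwargs
    assert all(entry is kwargs[0] for entry in kwargs)

    # If per-tag mutation were ever needed, a comprehension creates distinct dicts
    kwargs = [{"latest": True, "inherit": True} for _ in tags]
    assert kwargs[0] is not kwargs[1]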

I'm not sure what possible exceptions os.makedirs can raise, but this code presumes that any exception except EACCES means that the directory already exists. I think it would be better to change this to:

if e.errno != errno.EEXIST:
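
For context, a minimal sketch of that pattern as it ends up in the diff above: only EEXIST is treated as benign, anything else (EACCES included) is surfaced. On Python 3 alone, os.makedirs(path, exist_ok=True) would cover the EEXIST case, but MBS still supports Python 2 here. The name ensure_cache_dir is just illustrative:

    import errno
    import os

    def ensure_cache_dir(path):
        try:
            os.makedirs(path, mode=0o770)
        except OSError as e:
            # Only an already-existing directory is acceptable
            if e.errno != errno.EEXIST:
                raise RuntimeError("The MBS cache is not writeable.")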

It looks good to me, just one issue with EEXIST.

This mmd.set_xmd(xmd) is not related to this feature directly, right? It should have been done even before this commit to propagate buildrequires.update(resolved) to mmd.

I don't quite understand your comment. I'm just not setting the xmd if no defaults were added, since nothing changed. That if statement should have been there in the previous PR; it was just an oversight on my part.

I'm not sure what possible exceptions os.makedirs can raise, but this code presumes that any exception except EACCES means that the directory already exists. I think it would be better to change this to:
if e.errno != errno.EEXIST:

Nice suggestion!

FYI, you can also achieve the same with [{"latest": True, "inherit": True}] * len(tags)

Good suggestion.

2 new commits added

  • Prevent overlapping RPMs from buildrequired base modules from being available when using default modules
  • Default the cache directory to the "mbs" directory under tempfile.gettempdir()
4 years ago

@jkaluza I addressed your comments. Is this good to merge?

Build #239 failed (commit: 82ec694).
Rebase or make new commits to rebuild.

+1, the c3i failure looks like some infra issue.

I didn't read the code, but default modules should override non-modular RPMs.

I didn't read the code, but default modules should override non-modular RPMs.

That's the idea. Any buildrequired module that's not platform, including default modules, will override the RPMs from platform by adding conflicts to module-build-macros.

+1, the c3i failure looks like some infra issue.

I was able to rerun the C3I tests and they now succeed. I'll merge this.

Pull-Request has been merged by mprahl

4 years ago