#548 WIP: Add support for modular composes
Merged 7 years ago by lsedlar. Opened 7 years ago by jkaluza.
jkaluza/pungi master  into  master

file modified
+5 -1
@@ -493,7 +493,7 @@ 
              },
              "gather_source": {
                  "type": "string",
-                 "enum": ["json", "comps", "none"],
+                 "enum": ["module", "json", "comps", "none"],
              },
              "gather_fulltree": {
                  "type": "boolean",
@@ -614,6 +614,10 @@ 
              "global_target": {"type": "string"},
              "global_release": {"$ref": "#/definitions/optional_string"},
 
+             "pdc_url": {"type": "string"},
+             "pdc_develop": {"type": "boolean", "default": False},
+             "pdc_insecure": {"type": "boolean", "default": False},
+ 
              "koji_profile": {"type": "string"},
 
              "pkgset_koji_tag": {"type": "string"},
@@ -181,6 +181,24 @@ 
          # this is a HACK to make CDN happy (dmach: at least I think, need to confirm with dgregor)
          shutil.copy2(product_id_path, os.path.join(repo_dir, "repodata", "productid"))
 
+     # call modifyrepo to inject modulemd if needed
+     if variant.mmds:
+         import yaml
+         modules = {"modules": []}
+         for mmd in variant.mmds:
+             modules["modules"].append(mmd.dumps())
+         tmp_dir = compose.mkdtemp(prefix="pungi_")
+         modules_path = os.path.join(tmp_dir, "modules.yaml")
+         with open(modules_path, "w") as outfile:
+             outfile.write(yaml.safe_dump(modules))
+         cmd = repo.get_modifyrepo_cmd(os.path.join(repo_dir, "repodata"),
+                                       modules_path, mdtype="modules",
+                                       compress_type="gz")
+         log_file = compose.paths.log.log_file(
+             arch, "modifyrepo-modules-%s" % variant)
+         run(cmd, logfile=log_file, show_cmd=True)
+         shutil.rmtree(tmp_dir)
+ 
      compose.log_info("[DONE ] %s" % msg)
 
 

@@ -0,0 +1,44 @@ 
+ # -*- coding: utf-8 -*-
+ 
+ 
+ # This program is free software; you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+ # the Free Software Foundation; version 2 of the License.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ # GNU Library General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License
+ # along with this program; if not, see <https://gnu.org/licenses/>.
+ 
+ 
+ """
+ Get a package list based on modulemd metadata loaded in pkgset phase.
+ """
+ 
+ 
+ from pungi.wrappers.comps import CompsWrapper
+ import pungi.phases.gather.source
+ import kobo.rpmlib
+ 
+ 
+ class GatherSourceModule(pungi.phases.gather.source.GatherSourceBase):
+     enabled = True
+ 
+     def __call__(self, arch, variant):
+         groups = set()
+         packages = set()
+ 
+         if variant is not None and variant.modules:
+             rpms = variant.pkgset.rpms_by_arch[arch] + \
+                 variant.pkgset.rpms_by_arch["noarch"]
+             for rpm_obj in rpms:
+                 for mmd in variant.mmds:
+                     srpm = kobo.rpmlib.parse_nvr(rpm_obj.sourcerpm)["name"]
+                     if (srpm in mmd.components.rpms.keys() and
+                             rpm_obj.name not in mmd.filter.rpms):
+                         packages.add((rpm_obj.name, None))
+ 
+         return packages, groups

@@ -148,15 +148,18 @@ 
          seen_sourcerpms = set()
          # {Exclude,Exclusive}Arch must match *tree* arch + compatible native
          # arches (excluding multilib arches)
-         exclusivearch_list = get_valid_arches(primary_arch, multilib=False,
-                                               add_noarch=False, add_src=False)
+         if primary_arch:
+             exclusivearch_list = get_valid_arches(
+                 primary_arch, multilib=False, add_noarch=False, add_src=False)
+         else:
+             exclusivearch_list = None
          for arch in arch_list:
              self.rpms_by_arch.setdefault(arch, [])
              for i in other.rpms_by_arch.get(arch, []):
                  if i.file_path in self.file_cache:
                      # TODO: test if it really works
                      continue
-                 if arch == "noarch":
+                 if exclusivearch_list and arch == "noarch":
                      if i.excludearch and set(i.excludearch) & set(exclusivearch_list):
                          self.log_debug("Excluding (EXCLUDEARCH: %s): %s"
                                         % (sorted(set(i.excludearch)), i.file_name))

@@ -28,6 +28,84 @@ 
  
  import pungi.phases.pkgset.source
  
+ try:
+     from pdc_client import PDCClient
+     import modulemd
+     WITH_MODULES = True
+ except ImportError:
+     WITH_MODULES = False
+ 
+ 
+ def get_pdc_client_session(compose):
+     if not WITH_MODULES:
+         compose.log_warning("pdc_client module is not installed, "
+                             "support for modules is disabled")
+         return None
+     try:
+         return PDCClient(
+             server=compose.conf['pdc_url'],
+             develop=compose.conf['pdc_develop'],
+             ssl_verify=not compose.conf['pdc_insecure'],
+         )
+     except KeyError:
+         return None
+ 
+ 
+ def variant_dict_from_str(module_str):
+     module_info = {}
+ 
+     release_start = module_str.rfind('-')
+     module_info['variant_version'] = module_str[release_start+1:]
+     module_info['variant_id'] = module_str[:release_start]
+     module_info['variant_type'] = 'module'
+ 
+     return module_info
+ 
+ 
+ def get_module(session, module_info, strict=False):
+     """
+     :param session: PDCClient instance
+     :param module_info: module name-version string
+     :param strict: Normally this function returns None if no module can be
+            found.  If strict=True, then a ValueError is raised.
+ 
+     :return: the matching module dict from PDC, or None
+     """
+ 
+     module_info = variant_dict_from_str(module_info)
+ 
+     query = dict(
+         variant_id=module_info['variant_id'],
+         variant_version=module_info['variant_version'],
+     )
+     if module_info.get('variant_release'):
+         query['variant_release'] = module_info['variant_release']
+ 
+     retval = session['unreleasedvariants'](page_size=-1, **query)
+ 
+     # Error handling
+     if not retval:
+         if strict:
+             raise ValueError("Failed to find module in PDC %r" % query)
+         else:
+             return None
+ 
+     module = None
+     # If we specify 'variant_release', we expect only single module to be
+     # returned, but otherwise we have to pick the one with the highest
+     # release ourselves.
+     if 'variant_release' in query:
+         assert len(retval) <= 1, (
+             "More than one module returned from PDC: %s" % str(retval))
+         module = retval[0]
+     else:
+         module = retval[0]
+         for m in retval:
+             if int(m['variant_release']) > int(module['variant_release']):
+                 module = m
+ 
+     return module
+ 
  
  class PkgsetSourceKoji(pungi.phases.pkgset.source.PkgsetSourceBase):
      enabled = True
@@ -44,9 +122,7 @@ 
  
  def get_pkgset_from_koji(compose, koji_wrapper, path_prefix):
      event_info = get_koji_event_info(compose, koji_wrapper)
-     tag_info = get_koji_tag_info(compose, koji_wrapper)
- 
-     pkgset_global = populate_global_pkgset(compose, koji_wrapper, path_prefix, tag_info, event_info)
+     pkgset_global = populate_global_pkgset(compose, koji_wrapper, path_prefix, event_info)
      package_sets = populate_arch_pkgsets(compose, path_prefix, pkgset_global)
      package_sets["global"] = pkgset_global
  
@@ -58,32 +134,90 @@ 
      return package_sets
  
  
- def populate_global_pkgset(compose, koji_wrapper, path_prefix, compose_tag, event_id):
+ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
      all_arches = set(["src"])
      for arch in compose.get_arches():
          is_multilib = is_arch_multilib(compose.conf, arch)
          arches = get_valid_arches(arch, is_multilib)
          all_arches.update(arches)
  
-     compose_tag = compose.conf["pkgset_koji_tag"]
+     # List of compose tags from which we create this compose
+     compose_tags = []
+ 
+     # List of compose_tags per variant
+     variant_tags = {}
+ 
+     session = get_pdc_client_session(compose)
+     for variant in compose.all_variants.values():
+         variant.pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
+             koji_wrapper, compose.conf["sigkeys"], logger=compose._logger,
+             arches=all_arches)
+         variant_tags[variant] = []
+ 
+         # Find out all modules in every variant and add their compose tags
+         # to compose_tags list.
+         if session:
+             for module in variant.get_modules():
+                 pdc_module = get_module(session, module["name"])
+                 mmd = modulemd.ModuleMetadata()
+                 mmd.loads(pdc_module["modulemd"])
+                 tag = pdc_module["koji_tag"]
+                 variant.mmds.append(mmd)
+                 variant_tags[variant].append(tag)
+                 if tag not in compose_tags:
+                     compose_tags.append(tag)
+ 
+         if not variant_tags[variant]:
+             variant_tags[variant].append(compose.conf["pkgset_koji_tag"])
+ 
+     # In case we have no compose tag from module, use the default
+     # one from config.
+     if not compose_tags:
+         compose_tags.append(compose.conf["pkgset_koji_tag"])
+ 
      inherit = compose.conf["pkgset_koji_inherit"]
-     msg = "Populating the global package set from tag '%s'" % compose_tag
-     global_pkgset_path = os.path.join(compose.paths.work.topdir(arch="global"), "pkgset_global.pickle")
+     global_pkgset_path = os.path.join(
+         compose.paths.work.topdir(arch="global"), "pkgset_global.pickle")
      if compose.DEBUG and os.path.isfile(global_pkgset_path):
+         msg = "Populating the global package set from tag '%s'" % compose_tags
          compose.log_warning("[SKIP ] %s" % msg)
-         pkgset = pickle.load(open(global_pkgset_path, "r"))
+         global_pkgset = pickle.load(open(global_pkgset_path, "r"))
      else:
-         compose.log_info(msg)
-         pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(koji_wrapper, compose.conf["sigkeys"],
-                                                             logger=compose._logger, arches=all_arches)
-         pkgset.populate(compose_tag, event_id, inherit=inherit)
+         global_pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
+             koji_wrapper, compose.conf["sigkeys"], logger=compose._logger,
+             arches=all_arches)
+         # Get package set for each compose tag and merge it to global package
+         # list. Also prepare per-variant pkgset, because we do not have list
+         # of binary RPMs in module definition - there is just list of SRPMs.
+         for compose_tag in compose_tags:
+             compose.log_info("Populating the global package set from tag "
+                              "'%s'" % compose_tag)
+             pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
+                 koji_wrapper, compose.conf["sigkeys"], logger=compose._logger,
+                 arches=all_arches)
+             pkgset.populate(compose_tag, event_id, inherit=inherit)
+             for variant in compose.all_variants.values():
+                 if compose_tag in variant_tags[variant]:
+                     # Optimization for case where we have just single compose
+                     # tag - we do not have to merge in this case...
+                     if len(compose_tags) == 1:
+                         variant.pkgset = pkgset
+                     else:
+                         variant.pkgset.merge(pkgset, None, list(all_arches))
+             # Optimization for case where we have just single compose
+             # tag - we do not have to merge in this case...
+             if len(compose_tags) == 1:
+                 global_pkgset = pkgset
+             else:
+                 global_pkgset.merge(pkgset, None, list(all_arches))
          with open(global_pkgset_path, 'w') as f:
-             f.write(pickle.dumps(pkgset))
+             f.write(pickle.dumps(global_pkgset))
  
      # write global package list
-     pkgset.save_file_list(compose.paths.work.package_list(arch="global"),
-                           remove_path_prefix=path_prefix)
-     return pkgset
+     global_pkgset.save_file_list(
+         compose.paths.work.package_list(arch="global"),
+         remove_path_prefix=path_prefix)
+     return global_pkgset
  
  
  def get_koji_event_info(compose, koji_wrapper):
@@ -106,22 +240,3 @@ 
          json.dump(result, open(event_file, "w"))
      compose.log_info("Koji event: %s" % result["id"])
      return result
- 
- 
- def get_koji_tag_info(compose, koji_wrapper):
-     koji_proxy = koji_wrapper.koji_proxy
-     tag_file = os.path.join(compose.paths.work.topdir(arch="global"), "koji-tag")
-     msg = "Getting a koji tag info"
-     if compose.DEBUG and os.path.exists(tag_file):
-         compose.log_warning("[SKIP ] %s" % msg)
-         result = json.load(open(tag_file, "r"))
-     else:
-         compose.log_info(msg)
-         tag_name = compose.conf["pkgset_koji_tag"]
-         result = koji_proxy.getTag(tag_name)
-         if result is None:
-             raise ValueError("Unknown koji tag: %s" % tag_name)
-         result["name"] = tag_name
-         json.dump(result, open(tag_file, "w"))
-     compose.log_info("Koji compose tag: %(name)s (ID: %(id)s)" % result)
-     return result

file modified
+36 -1
@@ -77,6 +77,7 @@ 
              "type": str(variant_node.attrib["type"]),
              "arches": [str(i) for i in variant_node.xpath("arches/arch/text()")],
              "groups": [],
+             "modules": [],
              "environments": [],
              "buildinstallpackages": [],
              "is_empty": bool(variant_node.attrib.get("is_empty", False)),
@@ -108,6 +109,15 @@ 
  
                  variant_dict["groups"].append(group)
  
+         for modulelist_node in variant_node.xpath("modules"):
+             for module_node in modulelist_node.xpath("module"):
+                 module = {
+                     "name": str(module_node.text),
+                     "glob": self._is_true(module_node.attrib.get("glob", "false"))
+                 }
+ 
+                 variant_dict["modules"].append(module)
+ 
          for environments_node in variant_node.xpath("environments"):
              for environment_node in environments_node.xpath("environment"):
                  environment = {
@@ -198,9 +208,11 @@ 
  
  class Variant(object):
      def __init__(self, id, name, type, arches, groups, environments=None,
-                  buildinstallpackages=None, is_empty=False, parent=None):
+                  buildinstallpackages=None, is_empty=False, parent=None,
+                  modules=None):
  
          environments = environments or []
+         modules = modules or []
          buildinstallpackages = buildinstallpackages or []
  
          self.id = id
@@ -209,11 +221,16 @@ 
          self.arches = sorted(copy.deepcopy(arches))
          self.groups = sorted(copy.deepcopy(groups), lambda x, y: cmp(x["name"], y["name"]))
          self.environments = sorted(copy.deepcopy(environments), lambda x, y: cmp(x["name"], y["name"]))
+         self.modules = sorted(copy.deepcopy(modules),
+                               lambda x, y: cmp(x["name"], y["name"]))
          self.buildinstallpackages = sorted(buildinstallpackages)
          self.variants = {}
          self.parent = parent
          self.is_empty = is_empty
  
+         self.pkgset = None
+         self.mmds = []
+ 
      def __getitem__(self, name):
          return self.variants[name]
  
@@ -274,6 +291,22 @@ 
                      result.append(group)
          return result
  
+     def get_modules(self, arch=None, types=None, recursive=False):
+         """Return list of modules, default types is ["self"]"""
+ 
+         types = types or ["self"]
+         result = copy.deepcopy(self.modules)
+         for variant in self.get_variants(arch=arch, types=types,
+                                          recursive=recursive):
+             if variant == self:
+                 # XXX
+                 continue
+             for module in variant.get_modules(arch=arch, types=types,
+                                               recursive=recursive):
+                 if module not in result:
+                     result.append(module)
+         return result
+ 
      def get_variants(self, arch=None, types=None, recursive=False):
          """
          Return all variants of given arch and types.
@@ -339,6 +372,8 @@ 
              print("    ARCHES: %s" % ", ".join(sorted(i.arches)))
              for group in i.groups:
                  print("    GROUP:  %(name)-40s GLOB: %(glob)-5s DEFAULT: %(default)-5s USERVISIBLE: %(uservisible)-5s" % group)
+             for module in i.modules:
+                 print("    MODULE:  %(name)-40s GLOB: %(glob)-5s" % module)
              for env in i.environments:
                  print("    ENV:    %(name)-40s DISPLAY_ORDER: %(display_order)s" % env)
              print()

file modified
+8 -1
@@ -1,6 +1,6 @@ 
  <!ELEMENT variants (ref*,variant*)>
  
- <!ELEMENT variant (release?,arches,groups?,environments*,variants*,buildinstallpackages?)?>
+ <!ELEMENT variant (release?,arches,groups?,environments*,variants*,buildinstallpackages?,modules?)?>
  <!ATTLIST variant
      id CDATA #REQUIRED
      name CDATA #REQUIRED
@@ -27,6 +27,13 @@ 
      uservisible (true|false) #IMPLIED
  >
  
+ <!ELEMENT modules (module)+>
+ 
+ <!ELEMENT module (#PCDATA)>
+ <!ATTLIST module
+     glob (true|false) #IMPLIED
+ >
+ 
  <!ELEMENT environments (environment)+>
  
  <!ELEMENT environment (#PCDATA)>
file modified
+1
@@ -33,6 +33,7 @@ 
      def __init__(self, *args, **kwargs):
          super(MockVariant, self).__init__(*args, **kwargs)
          self.parent = kwargs.get('parent', None)
+         self.mmds = []
  
      def __str__(self):
          return self.uid

@@ -81,50 +81,6 @@ 
          with open(self.event_file) as f:
              self.assertEqual(json.load(f), EVENT_INFO)
  
- 
- class TestGetKojiTagInfo(helpers.PungiTestCase):
-     def setUp(self):
-         super(TestGetKojiTagInfo, self).setUp()
-         self.compose = helpers.DummyCompose(self.topdir, {
-             'pkgset_koji_tag': 'f25'
-         })
-         self.compose.DEBUG = False
-         self.tag_file = os.path.join(self.topdir, 'work', 'global', 'koji-tag')
-         self.koji_wrapper = mock.Mock()
- 
-     def test_get_tag_info(self):
-         self.koji_wrapper.koji_proxy.getTag.return_value = TAG_INFO
- 
-         tag_info = source_koji.get_koji_tag_info(self.compose, self.koji_wrapper)
- 
-         self.assertEqual(tag_info, TAG_INFO)
-         self.assertItemsEqual(
-             self.koji_wrapper.mock_calls,
-             [mock.call.koji_proxy.getTag('f25')]
-         )
-         with open(self.tag_file) as f:
-             self.assertEqual(json.load(f), TAG_INFO)
- 
-     def test_bad_tag_name(self):
-         self.koji_wrapper.koji_proxy.getTag.return_value = None
- 
-         with self.assertRaises(ValueError) as ctx:
-             source_koji.get_koji_tag_info(self.compose, self.koji_wrapper)
- 
-         self.assertIn('Unknown', str(ctx.exception))
- 
-     def test_get_tag_info_in_debug_mode(self):
-         self.compose.DEBUG = True
-         helpers.touch(self.tag_file, json.dumps(TAG_INFO))
- 
-         tag_info = source_koji.get_koji_tag_info(self.compose, self.koji_wrapper)
- 
-         self.assertEqual(tag_info, TAG_INFO)
-         self.assertItemsEqual(self.koji_wrapper.mock_calls, [])
-         with open(self.tag_file) as f:
-             self.assertEqual(json.load(f), TAG_INFO)
- 
- 
  class TestPopulateGlobalPkgset(helpers.PungiTestCase):
      def setUp(self):
          super(TestPopulateGlobalPkgset, self).setUp()
@@ -145,7 +101,7 @@ 
          orig_pkgset = KojiPackageSet.return_value
  
          pkgset = source_koji.populate_global_pkgset(
-             self.compose, self.koji_wrapper, '/prefix', 'f25', 123456)
+             self.compose, self.koji_wrapper, '/prefix', 123456)
  
          self.assertIs(pkgset, orig_pkgset)
          self.assertEqual(
@@ -168,7 +124,7 @@ 
          with mock.patch('pungi.phases.pkgset.sources.source_koji.open',
                          mock.mock_open(), create=True) as m:
              pkgset = source_koji.populate_global_pkgset(
-                 self.compose, self.koji_wrapper, '/prefix', 'f25', 123456)
+                 self.compose, self.koji_wrapper, '/prefix', 123456)
  
          self.assertEqual(pickle_load.call_args_list,
                           [mock.call(m.return_value)])
@@ -204,12 +160,12 @@ 
  
          self.assertItemsEqual(
              self.koji_wrapper.koji_proxy.mock_calls,
-             [mock.call.getLastEvent(),
-              mock.call.getTag('f25')]
+             [mock.call.getLastEvent()]
          )
+ 
          self.assertEqual(pgp.call_args_list,
                           [mock.call(self.compose, self.koji_wrapper, '/prefix',
-                                     TAG_INFO, EVENT_INFO)])
+                                     EVENT_INFO)])
          self.assertEqual(pap.call_args_list,
                           [mock.call(self.compose, '/prefix', pgp.return_value)])
          self.assertEqual(cgr.call_args_list,
This is work in progress, but I would appreciate some initial review to find out whether the current code looks sane. The tests are currently broken and I'm going to fix them to match the current code if we decide this is the way to go.

What I've changed:
- variants.xml can now contain "<modules><module>foobar</module>...</modules>" to define the names of the modules that should be included in the variant (see the illustrative snippet after this list).
- In PKGSET, all the variants are scanned for modules. Metadata for these modules is fetched from the PDC (we need the modulemd definition of a module and the name of the Koji tag in which the module is built).
- In PKGSET, the pkgsets from the Koji tags for modules are generated using the same code as before the patch. They are merged together to create global_pkgset. A per-variant pkgset is also stored in Variant.pkgset, so we can later find out in the GATHER phase which packages are in which module.
- In the GATHER phase, a new source_module gather source is created; it loads the modulemd fetched in the PKGSET phase and, based on Variant.pkgset, chooses the packages from the pkgset that should be in a particular variant.
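For illustration, a variant definition using the new element might look like this (variant id and module names are hypothetical; per the DTD change in this patch, <modules> is an optional child of <variant> and each <module> may carry an optional glob attribute):

    <variant id="Server" name="Server" type="variant">
      <arches>
        <arch>x86_64</arch>
      </arches>
      <modules>
        <module>httpd-master</module>
        <module>nginx-master</module>
      </modules>
    </variant>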

The way I understand this, it will return packages that are in the package set and listed in the module. If a package is wanted by the module but is not actually in the package set, it will be silently skipped. I think it would be more in line with the other sources if only the modules were checked here. If a package is required by a module but not available in the package set, depsolving will later report an error about the unavailable package.

Why log just the first tag? That could be any tag when using the modules, right? (In the usual case this would print the only tag, so that's fine.)

Can a variant have multiple tags? If yes, then what's the point in merging package sets when only one will always be used?

The koji_tag seems to only be used in the populate_global_pkgset function. Does it make sense to move it into a local variable (with some mapping) in that function?

Isn't this actually going to make two identical requests to PDC?

Overall the code looks sane to me. I expected the changes to be a lot more invasive. I'm not a huge fan of the hard dependency on pdc-client. Ultimately that dependency should probably go into a subpackage so that users that don't need this avoid a bunch of dependencies.

There is also a dependency that if gather source is modules, pkgset source has to be koji, which is something new. Other gather and pkgset sources can be combined pretty freely.

I'm not sure though if it actually works correctly. Imagine this situation: there are two modules from two different tags. M1 has foo-1.0.0 tagged in, M2 has foo-2.0.0. The pkgset phase will return a global package set that contains both these builds. The temporary repos (one per arch) will therefore also contain both these builds.

Then gather source will say variants for these modules should contain package foo. When depsolver runs with that input, it will pick the latest version for both modules.

Also if there are other non-module variants, they could pull in packages from module tags as well.

The problem with your proposed way is that we do not have a list of binary RPMs in the module definition. We only have the list of SRPMs and a list of RPMs which the module author supports being depended on (basically a list of the API provided by a module). This list of RPMs is just a subset of all the RPMs needed by the module.

I could probably add all the packages from the pkgset here and not check modulemd at all, but that could add packages which were added to the buildroot manually somehow and are not in the modulemd file, which is not a correct situation.
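To make that concrete, here is a rough, abbreviated sketch of the modulemd pieces source_module consults (hypothetical content, not a complete or validated document): components/rpms is keyed by SRPM name, while filter/rpms lists binary RPM names to leave out.

    document: modulemd
    version: 1
    data:
      components:
        rpms:
          httpd:            # SRPM the module is built from
            rationale: Primary content of the module.
      filter:
        rpms:
          - httpd-doc       # binary RPM excluded from the module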

Maybe I do not understand "merge" correctly, but the idea here is that a single variant can be created from multiple modules (= multiple Koji tags). Here I get the pkgset from each tag which is used by some variant, and for the variants where the tag is really used, I merge it with the pkgsets from previously found tags. The overall goal is that variant.pkgset contains the union of packages from all the tags (= modules) used by a variant.

So for example, if I have a variant Fedora-Server consisting of the module "httpd" living in the "module-httpd-master-1" tag and nginx living in the "module-nginx-master-1" tag, I want Variant.pkgset to be the union of the packages from both tags.
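A minimal sketch of that union logic, using plain sets of hypothetical package NVRs instead of real KojiPackageSet objects:

    variant_tags = {
        "Fedora-Server": ["module-httpd-master-1", "module-nginx-master-1"],
    }

    # Hypothetical mapping of Koji tag -> packages tagged into it.
    tag_contents = {
        "module-httpd-master-1": set(["httpd-2.4.25-1", "apr-1.5.2-3"]),
        "module-nginx-master-1": set(["nginx-1.10.2-1"]),
    }

    global_pkgset = set()
    variant_pkgsets = dict((v, set()) for v in variant_tags)

    for tag, packages in tag_contents.items():
        global_pkgset |= packages  # every tag is merged into the global set
        for variant, tags in variant_tags.items():
            if tag in tags:
                variant_pkgsets[variant] |= packages  # per-variant union

    # Fedora-Server ends up with the union of both module tags.
    assert variant_pkgsets["Fedora-Server"] == set(
        ["httpd-2.4.25-1", "apr-1.5.2-3", "nginx-1.10.2-1"])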

Ah, that's why the tests for environments have been failing, thanks... :)

But Variant.koji_tag is a single value. So merge here will only be called once. Or maybe I'm missing something.

Ah, I see I wrote it wrong. It should have said "if not".

Uh, that's me being stupid, sorry. There should be a "koji_tags" list. Good point, I will fix that in the next version of the patch.

Overall the code looks sane to me. I expected the changes to be a lot more invasive. I'm not a huge fan of the hard dependency on pdc-client. Ultimately that dependency should probably go into a subpackage so that users that don't need this avoid a bunch of dependencies.

That's a good idea, I will make the PDC dependency optional.

There is also a dependency that if gather source is modules, pkgset source has to be koji, which is something new. Other gather and pkgset sources can be combined pretty freely.

Yes :(. Unfortunately I haven't come up with an idea of where/how to do the same without making the code much more complicated... Are there more sources than just koji and repository? I could try to come up with something for source_repos, but that will need more work, because we do not have modules in local repos - I can of course create a testing one :).

Do you consider this to be a blocker?

I'm not sure though if it actually works correctly. Imagine this situation: there are two modules from two different tags. M1 has foo-1.0.0 tagged in, M2 has foo-2.0.0. The pkgset phase will return a global package set that contains both these builds. The temporary repos (one per arch) will therefore also contain both these builds.

This is the way we want it to work, I think (I will discuss that explicitly with @ralph today). In the end, for a single product, we should be able to store all the modules (and therefore also their packages) in a single repository with just modulemd metadata on top of that.

If two modules conflict with each other on the RPM level, we should not ship them in a single product.

Then gather source will say variants for these modules should contain package foo. When depsolver runs with that input, it will pick the latest version for both modules.
Also if there are other non-module variants, they could pull in packages from module tags as well.

That's something I have to talk about with you and Ralph - so far I presume we will have just modular variants per compose (in the end, that's needed by source_module, which sets the source for the whole product, not per variant).

In the end, for a single product, we should be able to store all the modules (and therefore also their packages) in a single repository with just modulemd metadata on top of that.

Yeah, that's the current thinking. Even though traditional client and image-building tools would take the latest of two present but differently versioned rpms, we'll eventually need client tooling to be able to select one of the two different versions based on some other decision making. It's okay if that stuff doesn't exist today - but it is a goal to be able to have multiple versions of the different modules' rpms in the same repo. (We were originally thinking that we would have different repos for each of these modules' content... but lately the advice I've been given is to collapse them into one for the compose and to defer to future as-yet-unwritten client tooling.)
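For reference, what "modulemd metadata on top" amounts to in the createrepo change is roughly the following invocation (a sketch assuming createrepo_c's modifyrepo_c; the paths are illustrative):

    modifyrepo_c --mdtype=modules --compress-type=gz \
        /tmp/pungi_tmpdir/modules.yaml /compose/Server/x86_64/os/repodata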

Do you consider this to be a blocker?

No, it's fine. It's not necessary to allow all the combinations to work together.

I'm not sure though if it actually works correctly. Imagine this situation: there are two modules from two different tags. M1 has foo-1.0.0 tagged in, M2 has foo-2.0.0. The pkgset phase will return a global package set that contains both these builds. The temporary repos (one per arch) will therefore also contain both these builds.

This is the way we want it to work, I think (I will discuss that explicitly with @ralph today). In the end, for a single product, we should be able to store all the modules (and therefore also their packages) in a single repository with just modulemd metadata on top of that.

If two modules conflict with each other on the RPM level, we should not ship them in a single product.

What I was trying to say is that the older version will not make it into the final repo at all. I expected you would want different modules with possibly different versions of the same package.

1 new commit added

  • Make PDC and modulemd deps optional, include modulemd metadata in RPM repository, fix the source_module to include noarch packages, clean up the code.
7 years ago

2 new commits added

  • Make PDC and modulemd deps optional, include modulemd metadata in RPM repository, fix the source_module to include noarch packages, clean up the code.
  • WIP: Add support for modular composes
7 years ago

This last commit fixes most of the issues you have pointed out in your initial review.

It also addresses https://pagure.io/pungi/issue/482, because the DNF team asked me for that yesterday and it is just a few more lines of code in the createrepo phase.

1 new commit added

  • Fix tests.
7 years ago

Tests are passing now :).

This should probably emit some kind of warning so that an absent pdc-client doesn't cause different behavior without telling anyone.

log.warn(...)?

Minor style here: should be modules=None with no spaces to conform to other method signatures.

What I was trying to say is that the older version will not make it into the final repo at all. I expected you would want different modules with possibly different versions of the same package.

Does the new method for the gather phase get us around this now?

Does the new method for the gather phase get us around this now?

Yes, that's exactly the problem we can solve with it, but my plan was to do that once we merge this PR.

Very minor detail: this comment should probably be removed or updated.

This is the only place where the function is called. If you remove it, you could as well remove the whole function. I'm not sure if that's desirable though, as it creates a trace of what the tag looked like. Admittedly the return value is just ignored, so maybe completely dropping this is not a bad idea after all.

This would be better as if tag not in compose_tags.

Are the default and uservisible attributes used for something? Will they be?

There are a few PEP8 complaints about the new code. While the whole codebase does not currently pass, it would be nice for new code to be compliant.

Does the new method for the gather phase get us around this now?

Yes, that's exactly the problem we can solve with it, but my plan was to do that once we merge this PR.

This might not be so easy: the gather source only returns a list of package names. This list is then expanded to include all dependencies. At no point are package versions taken into account. The assumption here always was that we are getting the latest builds from Koji.

This might not be so easy: the gather source only returns a list of package names. This list is then expanded to include all dependencies. At no point are package versions taken into account. The assumption here always was that we are getting the latest builds from Koji.

Hm, I have not tried to implement that, it's the next step after this PR, but thanks for the info.
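To restate the constraint described above, the gather-source contract is roughly this (a sketch, not the actual Pungi API; variant_pkgset_rpms is a hypothetical helper):

    def module_gather_source(variant, arch):
        # A gather source returns (packages, groups); packages are
        # (name, arch) tuples carrying no version or release.
        packages = set()
        groups = set()
        for rpm in variant_pkgset_rpms(variant, arch):  # hypothetical helper
            # Only the name survives, so two builds of the same package
            # cannot be told apart here; depsolving later picks the latest.
            packages.add((rpm.name, None))
        return packages, groups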

1 new commit added

  • Fix comments, remove get_koji_tag_info, remove unused attributes from module dtd section, log warning when modules are not enabled
7 years ago

1 new commit added

  • Fix formatting to pass PEP8
7 years ago

5 new commits added

  • Fix formatting to pass PEP8
  • Fix comments, remove get_koji_tag_info, remove unused attributes from module dtd section, log warning when modules are not enabled
  • Fix tests.
  • Make PDC and modulemd deps optional, include modulemd metadata in RPM repository, fix the source_module to include noarch packages, clean up the code.
  • WIP: Add support for modular composes
7 years ago

rebased

7 years ago

rebased

7 years ago

I tested this in staging infra and the patch has no effect on a regular compose based on a single tag.

It would be cool to have some documentation and/or tests to go along with the code change, but let's not block on that.

Pull-Request has been merged by lsedlar

7 years ago