PR#4 (Merged): fix up pungi4 and productmd

Proposed 2 years ago by maxamillion
Modified 2 years ago
From forks/maxamillion/pungi run_nightly into pungi master

@@ -0,0 +1,209 @@ 

+ #!/usr/bin/python

+ # -*- coding: utf-8 -*-

+ 

+ 

+ import sys

+ import fnmatch

+ import optparse

+ import lxml.etree

+ import re

+ from io import BytesIO

+ 

+ 

+ class CompsFilter(object):

+     def __init__(self, file_obj, reindent=False):

+         self.reindent = reindent

+         parser = None

+         if self.reindent:

+             parser = lxml.etree.XMLParser(remove_blank_text=True)

+         self.tree = lxml.etree.parse(file_obj, parser=parser)

+         self.encoding = "utf-8"

+ 

+     def _filter_elements_by_arch(self, xpath, arch, only_arch=False):

+         if only_arch:

+             # remove all elements without the 'arch' attribute

+             for i in self.tree.xpath(xpath + "[not(@arch)]"):

+                 i.getparent().remove(i)

+ 

+         for i in self.tree.xpath(xpath + "[@arch]"):

+             arches = i.attrib.get("arch")

+             arches = re.split(r"[, ]+", arches)

+             arches = [j for j in arches if j]

+             if arch not in arches:

+                 # remove elements not matching the arch

+                 i.getparent().remove(i)

+             else:

+                 # remove the 'arch' attribute

+                 del i.attrib["arch"]

+ 

+     def filter_packages(self, arch, only_arch=False):

+         """

+         Filter packages according to arch.

+         If only_arch is set, then only packages for the specified arch are preserved.

+         Multiple arches separated by comma can be specified in the XML.

+         """

+         self._filter_elements_by_arch("/comps/group/packagelist/packagereq", arch, only_arch)

+ 

+     def filter_groups(self, arch, only_arch=False):

+         """

+         Filter groups according to arch.

+         If only_arch is set, then only groups for the specified arch are preserved.

+         Multiple arches separated by comma can be specified in the XML.

+         """

+         self._filter_elements_by_arch("/comps/group", arch, only_arch)

+ 

+     def filter_category_groups(self):

+         """

+         Remove undefined groups from categories.

+         """

+         all_groups = self.tree.xpath("/comps/group/id/text()")

+         for category in self.tree.xpath("/comps/category"):

+             for group in category.xpath("grouplist/groupid"):

+                 if group.text not in all_groups:

+                     group.getparent().remove(group)

+ 

+     def remove_empty_groups(self, keep_empty=None):

+         """

+         Remove all groups without packages.

+         """

+         keep_empty = keep_empty or []

+         for group in self.tree.xpath("/comps/group"):

+             if not group.xpath("packagelist/packagereq"):

+                 group_id = group.xpath("id/text()")[0]

+                 found = False

+                 for pattern in keep_empty:

+                     if fnmatch.fnmatch(group_id, pattern):

+                         found = True

+                         break

+                 if found:

+                     continue

+                 group.getparent().remove(group)

+ 

+     def remove_empty_categories(self):

+         """

+         Remove all categories without groups.

+         """

+         for category in self.tree.xpath("/comps/category"):

+             if not category.xpath("grouplist/groupid"):

+                 category.getparent().remove(category)

+ 

+     def remove_categories(self):

+         """

+         Remove all categories.

+         """

+         categories = self.tree.xpath("/comps/category")

+         for i in categories:

+             i.getparent().remove(i)

+ 

+     def remove_langpacks(self):

+         """

+         Remove all langpacks.

+         """

+         langpacks = self.tree.xpath("/comps/langpacks")

+         for i in langpacks:

+             i.getparent().remove(i)

+ 

+     def remove_translations(self):

+         """

+         Remove all translations.

+         """

+         for i in self.tree.xpath("//*[@xml:lang]"):

+             i.getparent().remove(i)

+ 

+     def filter_environment_groups(self):

+         """

+         Remove undefined groups from environments.

+         """

+         all_groups = self.tree.xpath("/comps/group/id/text()")

+         for environment in self.tree.xpath("/comps/environment"):

+             for group in environment.xpath("grouplist/groupid"):

+                 if group.text not in all_groups:

+                     group.getparent().remove(group)

+ 

+     def remove_empty_environments(self):

+         """

+         Remove all environments without groups.

+         """

+         for environment in self.tree.xpath("/comps/environment"):

+             if not environment.xpath("grouplist/groupid"):

+                 environment.getparent().remove(environment)

+ 

+     def remove_environments(self):

+         """

+         Remove all environments.

+         """

+         environments = self.tree.xpath("/comps/environment")

+         for i in environments:

+             i.getparent().remove(i)

+ 

+     def write(self, file_obj):

+         self.tree.write(file_obj, pretty_print=self.reindent, xml_declaration=True, encoding=self.encoding)

+         file_obj.write("\n")

+ 

+     def pprint(self):

+         self.write(sys.stdout)

+ 

+     def xml(self):

+         io = BytesIO()  # lxml's tree.write() emits encoded bytes; io.StringIO would reject them

+         self.write(io)

+         io.seek(0)

+         return io.read()

+ 

+ 

+ def main():

+     parser = optparse.OptionParser("%prog [options] <comps.xml>")

+     parser.add_option("--output", help="redirect output to a file")

+     parser.add_option("--arch", help="filter groups and packagews according to an arch")

+     parser.add_option("--arch-only-groups", default=False, action="store_true", help="keep only arch groups, remove the rest")

+     parser.add_option("--arch-only-packages", default=False, action="store_true", help="keep only arch packages, remove the rest")

+     parser.add_option("--remove-categories", default=False, action="store_true", help="remove all categories")

+     parser.add_option("--remove-langpacks", default=False, action="store_true", help="remove the langpacks section")

+     parser.add_option("--remove-translations", default=False, action="store_true", help="remove all translations")

+     parser.add_option("--remove-environments", default=False, action="store_true", help="remove all environment sections")

+     parser.add_option("--keep-empty-group", default=[], action="append", metavar="[GROUPID]", help="keep groups even if they are empty")

+     parser.add_option("--no-cleanup", default=False, action="store_true", help="don't remove empty groups and categories")

+     parser.add_option("--no-reindent", default=False, action="store_true", help="don't re-indent the output")

+ 

+     opts, args = parser.parse_args()

+ 

+     if len(args) != 1:

+         parser.error("please specify exactly one comps file")

+ 

+     comps_file = args[0]

+ 

+     if opts.arch is None:

+         parser.error("please specify arch")

+ 

+     file_obj = open(comps_file, "r")

+     f = CompsFilter(file_obj, reindent=not opts.no_reindent)

+     f.filter_packages(opts.arch, opts.arch_only_packages)

+     f.filter_groups(opts.arch, opts.arch_only_groups)

+ 

+     if not opts.no_cleanup:

+         f.remove_empty_groups(keep_empty=opts.keep_empty_group)

+         f.filter_category_groups()

+         f.remove_empty_categories()

+         f.filter_environment_groups()

+         f.remove_empty_environments()

+ 

+     if opts.remove_categories:

+         f.remove_categories()

+ 

+     if opts.remove_langpacks:

+         f.remove_langpacks()

+ 

+     if opts.remove_translations:

+         f.remove_translations()

+ 

+     if opts.remove_environments:

+         f.remove_environments()

+ 

+     if opts.output:

+         out = open(opts.output, "w")

+         f.write(out)

+     else:

+         f.pprint()

+ 

+ 

+ if __name__ == "__main__":

+     main()
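
For reference, a minimal sketch of driving the new CompsFilter class directly rather than through the command line (the import is hypothetical; the file ships as a standalone script, installed as bin/comps_filter by the setup.py hunk below). It mirrors what main() does for --arch=x86_64 with the default cleanup:

    # hypothetical import; in practice the script is invoked as a command
    from comps_filter import CompsFilter

    with open("comps.xml", "r") as f:
        comps = CompsFilter(f, reindent=True)

    comps.filter_packages("x86_64")      # drop <packagereq> elements for other arches
    comps.filter_groups("x86_64")        # drop <group> elements for other arches
    comps.remove_empty_groups()          # the cleanup block main() runs without --no-cleanup
    comps.filter_category_groups()
    comps.remove_empty_categories()
    comps.filter_environment_groups()
    comps.remove_empty_environments()
    print(comps.xml())                   # serialized, re-indented comps document
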
pungi.spec +1 -0
file changed

@@ -16,6 +16,7 @@ 

  Requires:       python-productmd

  Requires:       python-kickstart

  Requires:       libselinux-python

+ Requires:       createrepo_c

  

  BuildArch:      noarch

  
file changed

@@ -27,7 +27,7 @@ 

  import shutil

  

  import kobo.log

- from productmd.composeinfo import Compose

+ from productmd.composeinfo import ComposeInfo

  from productmd.images import Images

  

  from pungi.wrappers.variants import VariantsXmlParser

@@ -38,16 +38,18 @@

  

  

  def get_compose_dir(topdir, conf, compose_type="production", compose_date=None, compose_respin=None, compose_label=None, already_exists_callbacks=None):

-     topdir = os.path.abspath(topdir)

      already_exists_callbacks = already_exists_callbacks or []

  

      # create an incomplete composeinfo to generate compose ID

-     ci = Compose()

-     ci.product.name = conf["product_name"]

-     ci.product.short = conf["product_short"]

-     ci.product.version = conf["product_version"]

-     ci.product.is_layered = bool(conf.get("product_is_layered", False))

-     if ci.product.is_layered:

+     ci = ComposeInfo()

+     ci.compose.name = conf["product_name"]

+     ci.release.name = conf["product_name"]

+     ci.compose.short = conf["product_short"]

+     ci.release.short = conf["product_short"]

+     ci.compose.version = conf["product_version"]

+     ci.release.version = conf["product_version"]

+     ci.compose.is_layered = bool(conf.get("product_is_layered", False))

+     if ci.compose.is_layered:

          ci.base_product.name = conf["base_product_name"]

          ci.base_product.short = conf["base_product_short"]

          ci.base_product.version = conf["base_product_version"]

@@ -57,9 +59,6 @@

      ci.compose.date = compose_date or time.strftime("%Y%m%d", time.localtime())

      ci.compose.respin = compose_respin or 0

  

-     # HACK - add topdir for callbacks

-     ci.topdir = topdir

- 

      while 1:

          ci.compose.id = ci.create_compose_id()

  

@@ -113,7 +112,7 @@

          self.paths = Paths(self)

  

          # to provide compose_id, compose_date and compose_respin

-         self.ci_base = Compose()

+         self.ci_base = ComposeInfo()

          self.ci_base.load(os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json"))

  

          self.supported = supported

@@ -121,7 +120,7 @@

              self.log_info("Automatically setting 'supported' flag for a Release Candidate (%s) compose." % self.compose_label)

              self.supported = True

  

-         self.im = ImageManifest()

+         self.im = Images()

          if self.DEBUG:

              try:

                  self.im.load(self.paths.compose.metadata("images.json"))

@@ -198,6 +197,9 @@

          self.variants = VariantsXmlParser(file_obj, tree_arches).parse()

  

          # populate ci_base with variants - needed for layered-products (compose_id)

+         ####FIXME - compose_to_composeinfo is no longer needed and has been

+         ####        removed, but I'm not entirely sure what this is needed for

+         ####        or if it is at all

          self.ci_base = compose_to_composeinfo(self)

  

      def get_variants(self, types=None, arch=None, recursive=False):
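
The hunks above track productmd's rename of Compose to ComposeInfo and the split of the old ci.product.* attributes into ci.release.* (mirrored onto ci.compose.* here while the API is in transition). A hedged sketch of the new object as this PR uses it, with illustrative values:

    from productmd.composeinfo import ComposeInfo

    ci = ComposeInfo()
    ci.release.name = "Fedora"       # was ci.product.name
    ci.release.short = "Fedora"      # was ci.product.short
    ci.release.version = "23"        # was ci.product.version
    # this PR also mirrors these values onto ci.compose.* as a transition hack
    ci.compose.type = "production"
    ci.compose.date = "20150701"
    ci.compose.respin = 0
    ci.compose.id = ci.create_compose_id()  # e.g. "Fedora-23-20150701.0" (format assumed)
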
file changed

@@ -73,13 +73,13 @@ 

      ci.compose.label = compose.compose_label

  

      # product

-     ci.product.name = compose.conf["product_name"]

-     ci.product.version = compose.conf["product_version"]

-     ci.product.short = compose.conf["product_short"]

-     ci.product.is_layered = compose.conf.get("product_is_layered", False)

+     ci.release.name = compose.conf["product_name"]

+     ci.release.version = compose.conf["product_version"]

+     ci.release.short = compose.conf["product_short"]

+     ci.release.is_layered = compose.conf.get("product_is_layered", False)

  

      # base product

-     if ci.product.is_layered:

+     if ci.release.is_layered:

          ci.base_product.name = compose.conf["base_product_name"]

          ci.base_product.version = compose.conf["base_product_version"]

          ci.base_product.short = compose.conf["base_product_short"]

@@ -99,38 +99,38 @@

          var.arches = set(variant.arches)

  

          if var.type == "layered-product":

-             var.product.name = variant.product_name

-             var.product.short = variant.product_short

-             var.product.version = variant.product_version

-             var.product.is_layered = True

+             var.release.name = variant.product_name

+             var.release.short = variant.product_short

+             var.release.version = variant.product_version

+             var.release.is_layered = True

  

          for arch in variant.arches:

              # paths: binaries

-             var.os_tree[arch] = relative_path(compose.paths.compose.os_tree(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

-             var.repository[arch] = relative_path(compose.paths.compose.repository(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

-             var.packages[arch] = relative_path(compose.paths.compose.packages(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

+             var.paths.os_tree[arch] = relative_path(compose.paths.compose.os_tree(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

+             var.paths.repository[arch] = relative_path(compose.paths.compose.repository(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

+             var.paths.packages[arch] = relative_path(compose.paths.compose.packages(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

              iso_dir = compose.paths.compose.iso_dir(arch=arch, variant=variant, create_dir=False) or ""

              if iso_dir and os.path.isdir(os.path.join(compose.paths.compose.topdir(), iso_dir)):

-                 var.isos[arch] = relative_path(iso_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

+                 var.paths.isos[arch] = relative_path(iso_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

              jigdo_dir = compose.paths.compose.jigdo_dir(arch=arch, variant=variant, create_dir=False) or ""

              if jigdo_dir and os.path.isdir(os.path.join(compose.paths.compose.topdir(), jigdo_dir)):

-                 var.jigdos[arch] = relative_path(jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

+                 var.paths.jigdos[arch] = relative_path(jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

  

              # paths: sources

-             var.source_tree[arch] = relative_path(compose.paths.compose.os_tree(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

-             var.source_repository[arch] = relative_path(compose.paths.compose.repository(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

-             var.source_packages[arch] = relative_path(compose.paths.compose.packages(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

+             var.paths.source_tree[arch] = relative_path(compose.paths.compose.os_tree(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

+             var.paths.source_repository[arch] = relative_path(compose.paths.compose.repository(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

+             var.paths.source_packages[arch] = relative_path(compose.paths.compose.packages(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

              source_iso_dir = compose.paths.compose.iso_dir(arch="source", variant=variant, create_dir=False) or ""

              if source_iso_dir and os.path.isdir(os.path.join(compose.paths.compose.topdir(), source_iso_dir)):

-                 var.source_isos[arch] = relative_path(source_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

+                 var.paths.source_isos[arch] = relative_path(source_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

              source_jigdo_dir = compose.paths.compose.jigdo_dir(arch="source", variant=variant, create_dir=False) or ""

              if source_jigdo_dir and os.path.isdir(os.path.join(compose.paths.compose.topdir(), source_jigdo_dir)):

-                 var.source_jigdos[arch] = relative_path(source_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

+                 var.paths.source_jigdos[arch] = relative_path(source_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

  

              # paths: debug

-             var.debug_tree[arch] = relative_path(compose.paths.compose.debug_tree(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

-             var.debug_repository[arch] = relative_path(compose.paths.compose.debug_repository(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

-             var.debug_packages[arch] = relative_path(compose.paths.compose.debug_packages(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

+             var.paths.debug_tree[arch] = relative_path(compose.paths.compose.debug_tree(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

+             var.paths.debug_repository[arch] = relative_path(compose.paths.compose.debug_repository(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

+             var.paths.debug_packages[arch] = relative_path(compose.paths.compose.debug_packages(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

              '''

              # XXX: not supported (yet?)

              debug_iso_dir = compose.paths.compose.debug_iso_dir(arch=arch, variant=variant) or ""

@@ -186,10 +186,10 @@

  

          # product

          # TODO: read from variants.xml

-         ti.product.name = variant.product_name

-         ti.product.version = variant.product_version

-         ti.product.short = variant.product_short

-         ti.product.is_layered = True

+         ti.release.name = variant.product_name

+         ti.release.version = variant.product_version

+         ti.release.short = variant.product_short

+         ti.release.is_layered = True

  

          # base product

          ti.base_product.name = compose.conf["product_name"]

@@ -201,13 +201,13 @@

          ti.base_product.short = compose.conf["product_short"]

      else:

          # product

-         ti.product.name = compose.conf["product_name"]

-         ti.product.version = compose.conf["product_version"]

-         ti.product.short = compose.conf["product_short"]

-         ti.product.is_layered = compose.conf.get("product_is_layered", False)

+         ti.release.name = compose.conf["product_name"]

+         ti.release.version = compose.conf["product_version"]

+         ti.release.short = compose.conf["product_short"]

+         ti.release.is_layered = compose.conf.get("product_is_layered", False)

  

          # base product

-         if ti.product.is_layered:

+         if ti.release.is_layered:

              ti.base_product.name = compose.conf["base_product_name"]

              ti.base_product.version = compose.conf["base_product_version"]

              ti.base_product.short = compose.conf["base_product_short"]

@@ -236,7 +236,7 @@

      ti.variants.add(var)

  

      repomd_path = os.path.join(var.repository, "repodata", "repomd.xml")

-     ti.checksums.add(os_tree, repomd_path)

+     ti.checksums.add(repomd_path, "sha256", os_tree)

  

      for i in variant.get_variants(types=["addon"], arch=arch):

          addon = productmd.treeinfo.Variant(ti)

@@ -251,17 +251,17 @@

          var.add(addon)

  

          repomd_path = os.path.join(addon.repository, "repodata", "repomd.xml")

-         ti.checksums.add(os_tree, repomd_path)

+         ti.checksums.add(repomd_path, "sha256", os_tree)

  

-     class LoraxProduct(productmd.treeinfo.product.Product):

+     class LoraxProduct(productmd.treeinfo.Release):

          def _check_short(self):

              # HACK: set self.short so .treeinfo produced by lorax can be read

              if not self.short:

                  self.short = compose.conf["product_short"]

  

-     class LoraxTreeInfo(productmd.TreeInfo):

+     class LoraxTreeInfo(productmd.treeinfo.TreeInfo):

          def clear(self):

-             productmd.TreeInfo.clear(self)

+             super(LoraxTreeInfo, self).clear()

              self.product = LoraxProduct(self)

  

      # images

@@ -277,12 +277,12 @@

              # stage2 - mainimage

              if bi_ti.stage2.mainimage:

                  ti.stage2.mainimage = bi_ti.stage2.mainimage

-                 ti.checksums.add(os_tree, ti.stage2.mainimage)

+                 ti.checksums.add(ti.stage2.mainimage, "sha256", os_tree)

  

              # stage2 - instimage

              if bi_ti.stage2.instimage:

                  ti.stage2.instimage = bi_ti.stage2.instimage

-                 ti.checksums.add(os_tree, ti.stage2.instimage)

+                 ti.checksums.add(ti.stage2.instimage, "sha256", os_tree)

  

              # images

              for platform in bi_ti.images.images:

@@ -290,7 +290,7 @@

                  ti.tree.platforms.add(platform)

                  for image, path in bi_ti.images.images[platform].items():

                      ti.images.images[platform][image] = path

-                     ti.checksums.add(os_tree, path)

+                     ti.checksums.add(path, "sha256", os_tree)

  

          # add product.img to images-$arch

          product_img = os.path.join(os_tree, "images", "product.img")

@@ -298,7 +298,7 @@

          if os.path.isfile(product_img):

              for platform in ti.images.images:

                  ti.images.images[platform]["product.img"] = product_img_relpath

-                 ti.checksums.add(os_tree, product_img_relpath)

+                 ti.checksums.add(product_img_relpath, "sha256", os_tree)

  

      path = os.path.join(compose.paths.compose.os_tree(arch=arch, variant=variant), ".treeinfo")

      compose.log_info("Writing treeinfo: %s" % path)
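
The checksum calls above also move to productmd's newer argument order: the relative path comes first, then the checksum type, then the tree root the path is resolved against (the old API took the root first and implied the type). A minimal sketch, assuming an os tree on disk:

    import productmd.treeinfo

    ti = productmd.treeinfo.TreeInfo()
    os_tree = "/mnt/compose/Server/x86_64/os"   # illustrative tree root
    # records a sha256 digest for <os_tree>/repodata/repomd.xml, per this PR's usage
    ti.checksums.add("repodata/repomd.xml", "sha256", os_tree)
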
file changed

@@ -26,7 +26,7 @@ 

  

  from kobo.threads import ThreadPool, WorkerThread

  from kobo.shortcuts import run, read_checksum_file, relative_path

- from productmd.imagemanifest import Image

+ from productmd.images import Image

  

  from pungi.util import get_buildroot_rpms, get_volid

  from pungi.wrappers.lorax import LoraxWrapper

@@ -299,7 +299,7 @@

          img.volume_id = iso.get_volume_id(new_boot_iso_path)

      except RuntimeError:

          pass

-     compose.im.add(arch, variant.uid, img)

+     compose.im.add(variant.uid, arch, img)

      compose.log_info("[DONE ] %s" % msg)

  

  
file changed

@@ -23,7 +23,7 @@ 

  

  import koji

  import productmd.treeinfo

- from productmd.imagemanifest import Image

+ from productmd.images import Image

  from kobo.threads import ThreadPool, WorkerThread

  from kobo.shortcuts import run, read_checksum_file, relative_path

  

@@ -275,7 +275,7 @@

              img.volume_id = iso.get_volume_id(cmd["iso_path"])

          except RuntimeError:

              pass

-         compose.im.add(cmd["arch"], cmd["variant"].uid, img)

+         compose.im.add(cmd["variant"].uid, cmd["arch"], img)

          # TODO: supported_iso_bit

          # add: boot.iso
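
Both hunks flip compose.im.add to the productmd Images manifest order, variant before arch. A hedged sketch of building one entry (the attributes shown are the ones pungi fills here; values are illustrative):

    from productmd.images import Image, Images

    im = Images()
    img = Image(im)               # an Image is bound to its parent manifest
    img.path = "Server/x86_64/iso/boot.iso"
    img.arch = "x86_64"
    img.type = "boot"
    img.format = "iso"
    img.disc_number = 1
    img.disc_count = 1
    img.bootable = True
    img.volume_id = "Fedora-S-23-x86_64"   # illustrative
    im.add("Server", "x86_64", img)        # variant first, then arch (the new order)
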

  
file changed

@@ -21,7 +21,7 @@ 

  import json

  

  from kobo.rpmlib import parse_nvra

- from productmd import RpmManifest

+ from productmd.rpms import Rpms

  

  from pungi.wrappers.scm import get_file_from_scm

  from link import link_files

@@ -112,7 +112,7 @@

          pkg_map = gather_wrapper(self.compose, self.pkgset_phase.package_sets, self.pkgset_phase.path_prefix)

  

          manifest_file = self.compose.paths.compose.metadata("rpms.json")

-         manifest = RpmManifest()

+         manifest = Rpms()

          manifest.compose.id = self.compose.compose_id

          manifest.compose.type = self.compose.compose_type

          manifest.compose.date = self.compose.compose_date
file changed

@@ -62,7 +62,7 @@ 

          # update rpm manifest

          pkg_obj = pkg_set[pkg["path"]]

          nevra = pkg_obj.nevra

-         manifest.add("src", variant.uid, nevra, path=dst_relpath, sigkey=pkg_obj.signature, rpm_type="source")

+         manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="source")

  

          # update srpm_map

          srpm_map.setdefault(pkg_obj.file_name, nevra)

@@ -80,7 +80,7 @@

          pkg_obj = pkg_set[pkg["path"]]

          nevra = pkg_obj.nevra

          src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)

-         manifest.add(arch, variant.uid, nevra, path=dst_relpath, sigkey=pkg_obj.signature, rpm_type="package", srpm_nevra=src_nevra)

+         manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="binary", srpm_nevra=src_nevra)

  

      packages_dir = compose.paths.compose.debug_packages(arch, variant)

      packages_dir_relpath = compose.paths.compose.debug_packages(arch, variant, relative=True)

@@ -95,7 +95,7 @@

          pkg_obj = pkg_set[pkg["path"]]

          nevra = pkg_obj.nevra

          src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)

-         manifest.add(arch, variant.uid, nevra, path=dst_relpath, sigkey=pkg_obj.signature, rpm_type="debug", srpm_nevra=src_nevra)

+         manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="debug", srpm_nevra=src_nevra)

  

      pool.start()

      pool.stop()
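
The RPM manifest changes follow the same pattern: RpmManifest becomes productmd.rpms.Rpms, rpm_type= becomes category= (with "package" renamed to "binary"), and variant now precedes arch. A minimal sketch of one record, keyword arguments exactly as the hunks above use them, values illustrative:

    from productmd.rpms import Rpms

    manifest = Rpms()
    manifest.add(
        "Server",                          # variant first
        "x86_64",                          # then arch
        "bash-0:4.3.42-1.fc23.x86_64",     # NEVRA
        path="Server/x86_64/os/Packages/b/bash-4.3.42-1.fc23.x86_64.rpm",
        sigkey="81b46521",
        category="binary",                 # was rpm_type="package"
        srpm_nevra="bash-0:4.3.42-1.fc23.src",
    )
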

@@ -69,7 +69,7 @@ 

  

  def write_pungi_config(compose, arch, variant, packages, groups, filter_packages, multilib_whitelist, multilib_blacklist, repos=None, comps_repo=None, package_set=None, fulltree_excludes=None, prepopulate=None):

      """write pungi config (kickstart) for arch/variant"""

-     pungi = PungiWrapper()

+     pungi_wrapper = PungiWrapper()

      pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch)

      msg = "Writing pungi config (arch: %s, variant: %s): %s" % (arch, variant, pungi_cfg)

  

@@ -101,17 +101,17 @@

          else:

              filter_packages_str.append(pkg_name)

  

-     pungi.write_kickstart(ks_path=pungi_cfg, repos=repos, groups=groups, packages=packages_str, exclude_packages=filter_packages_str, comps_repo=comps_repo, lookaside_repos=lookaside_repos, fulltree_excludes=fulltree_excludes, multilib_whitelist=multilib_whitelist, multilib_blacklist=multilib_blacklist, prepopulate=prepopulate)

+     pungi_wrapper.write_kickstart(ks_path=pungi_cfg, repos=repos, groups=groups, packages=packages_str, exclude_packages=filter_packages_str, comps_repo=comps_repo, lookaside_repos=lookaside_repos, fulltree_excludes=fulltree_excludes, multilib_whitelist=multilib_whitelist, multilib_blacklist=multilib_blacklist, prepopulate=prepopulate)

  

  

  def resolve_deps(compose, arch, variant):

-     pungi = PungiWrapper()

+     pungi_wrapper = PungiWrapper()

      pungi_log = compose.paths.work.pungi_log(arch, variant)

  

      msg = "Running pungi (arch: %s, variant: %s)" % (arch, variant)

      if compose.DEBUG and os.path.exists(pungi_log):

          compose.log_warning("[SKIP ] %s" % msg)

-         return pungi.get_packages(open(pungi_log, "r").read())

+         return pungi_wrapper.get_packages(open(pungi_log, "r").read())

  

      compose.log_info("[BEGIN] %s" % msg)

      pungi_conf = compose.paths.work.pungi_conf(arch, variant)

@@ -146,7 +146,7 @@

      yum_arch = tree_arch_to_yum_arch(arch)

      tmp_dir = compose.paths.work.tmp_dir(arch, variant)

      cache_dir = compose.paths.work.pungi_cache_dir(arch, variant)

-     cmd = pungi.get_pungi_cmd(pungi_conf, destdir=tmp_dir, name=variant.uid, selfhosting=selfhosting, fulltree=fulltree, arch=yum_arch, full_archlist=True, greedy=greedy_method, cache_dir=cache_dir, lookaside_repos=lookaside_repos, multilib_methods=multilib_methods)

+     cmd = pungi_wrapper.get_pungi_cmd(pungi_conf, destdir=tmp_dir, name=variant.uid, selfhosting=selfhosting, fulltree=fulltree, arch=yum_arch, full_archlist=True, greedy=greedy_method, cache_dir=cache_dir, lookaside_repos=lookaside_repos, multilib_methods=multilib_methods)

      # Use temp working directory directory as workaround for

      # https://bugzilla.redhat.com/show_bug.cgi?id=795137

      tmp_dir = tempfile.mkdtemp(prefix="pungi_")

@@ -154,7 +154,7 @@

          run(cmd, logfile=pungi_log, show_cmd=True, workdir=tmp_dir)

      finally:

          rmtree(tmp_dir)

-     result = pungi.get_packages(open(pungi_log, "r").read())

+     result = pungi_wrapper.get_packages(open(pungi_log, "r").read())

  

      compose.log_info("[DONE ] %s" % msg)

      return result

@@ -165,9 +165,9 @@

      if not check_deps:

          return

  

-     pungi = PungiWrapper()

+     pungi_wrapper = PungiWrapper()

      pungi_log = compose.paths.work.pungi_log(arch, variant)

-     missing_deps = pungi.get_missing_deps(open(pungi_log, "r").read())

+     missing_deps = pungi_wrapper.get_missing_deps(open(pungi_log, "r").read())

      if missing_deps:

          for pkg in sorted(missing_deps):

              compose.log_error("Unresolved dependencies in package %s: %s" % (pkg, sorted(missing_deps[pkg])))

@@ -150,7 +150,7 @@ 

  

  def write_pungi_config(compose, arch, variant, repos=None, comps_repo=None, package_set=None):

      """write pungi config (kickstart) for arch/variant"""

-     pungi = PungiWrapper()

+     pungi_wrapper = PungiWrapper()

      pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch)

      msg = "Writing pungi config (arch: %s, variant: %s): %s" % (arch, variant, pungi_cfg)

  

@@ -182,4 +182,4 @@

          packages.append("system-release")

  

      prepopulate = get_prepopulate_packages(compose, arch, None)

-     pungi.write_kickstart(ks_path=pungi_cfg, repos=repos, groups=grps, packages=packages, exclude_packages=[], comps_repo=None, prepopulate=prepopulate)

+     pungi_wrapper.write_kickstart(ks_path=pungi_cfg, repos=repos, groups=grps, packages=packages, exclude_packages=[], comps_repo=None, prepopulate=prepopulate)
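
The remaining gather hunks only rename the local variable pungi to pungi_wrapper, which avoids shadowing the pungi package the module imports from. For reference, a sketch of the call the renamed variable makes, restricted to keyword arguments that appear in the calls above; the import path and the value shapes are assumptions:

    from pungi.wrappers.pungi import PungiWrapper   # assumed module path

    pungi_wrapper = PungiWrapper()
    pungi_wrapper.write_kickstart(
        ks_path="/tmp/work/x86_64/pungi/Server.x86_64.conf",  # illustrative
        repos={"repo-0": "file:///repo/x86_64/os"},           # repo mapping shape assumed
        groups=["core"],
        packages=["bash"],
        exclude_packages=[],
        comps_repo=None,
        prepopulate=None,
    )
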

@@ -36,6 +36,7 @@ 

      scripts         = [

          'bin/pungi',

          'bin/pungi-koji',

+         'bin/comps_filter',

      ],

      data_files      = [

          ('/usr/share/pungi', glob.glob('share/*.xsl')),
Changes summary
7 files changed: +1 -0, +15 -13, +40 -40, +2 -2, +2 -2, +2 -2, +3 -3