PR#4 Merged fix up pungi4 and productmd

Proposed 2 years ago by maxamillion
Modified 2 years ago
From forks/maxamillion/pungi run_nightly  into pungi master

  1 @@ -0,0 +1,209 @@ 

  2 + #!/usr/bin/python

  3 + # -*- coding: utf-8 -*-

  4 + 

  5 + 

  6 + import sys

  7 + import fnmatch

  8 + import optparse

  9 + import lxml.etree

 10 + import re

 11 + from io import StringIO

 12 + 

 13 + 

 14 + class CompsFilter(object):

 15 +     def __init__(self, file_obj, reindent=False):

 16 +         self.reindent = reindent

 17 +         parser = None

 18 +         if self.reindent:

 19 +             parser = lxml.etree.XMLParser(remove_blank_text=True)

 20 +         self.tree = lxml.etree.parse(file_obj, parser=parser)

 21 +         self.encoding = "utf-8"

 22 + 

 23 +     def _filter_elements_by_arch(self, xpath, arch, only_arch=False):

 24 +         if only_arch:

 25 +             # remove all elements without the 'arch' attribute

 26 +             for i in self.tree.xpath(xpath + "[not(@arch)]"):

 27 +                 i.getparent().remove(i)

 28 + 

 29 +         for i in self.tree.xpath(xpath + "[@arch]"):

 30 +             arches = i.attrib.get("arch")

 31 +             arches = re.split(r"[, ]+", arches)

 32 +             arches = [j for j in arches if j]

 33 +             if arch not in arches:

 34 +                 # remove elements not matching the arch

 35 +                 i.getparent().remove(i)

 36 +             else:

 37 +                 # remove the 'arch' attribute

 38 +                 del i.attrib["arch"]

 39 + 

 40 +     def filter_packages(self, arch, only_arch=False):

 41 +         """

 42 +         Filter packages according to arch.

 43 +         If only_arch is set, then only packages for the specified arch are preserved.

 44 +         Multiple arches separated by comma can be specified in the XML.

 45 +         """

 46 +         self._filter_elements_by_arch("/comps/group/packagelist/packagereq", arch, only_arch)

 47 + 

 48 +     def filter_groups(self, arch, only_arch=False):

 49 +         """

 50 +         Filter groups according to arch.

 51 +         If only_arch is set, then only groups for the specified arch are preserved.

 52 +         Multiple arches separated by comma can be specified in the XML.

 53 +         """

 54 +         self._filter_elements_by_arch("/comps/group", arch, only_arch)

 55 + 

 56 +     def filter_category_groups(self):

 57 +         """

 58 +         Remove undefined groups from categories.

 59 +         """

 60 +         all_groups = self.tree.xpath("/comps/group/id/text()")

 61 +         for category in self.tree.xpath("/comps/category"):

 62 +             for group in category.xpath("grouplist/groupid"):

 63 +                 if group.text not in all_groups:

 64 +                     group.getparent().remove(group)

 65 + 

 66 +     def remove_empty_groups(self, keep_empty=None):

 67 +         """

 68 +         Remove all groups without packages.

 69 +         """

 70 +         keep_empty = keep_empty or []

 71 +         for group in self.tree.xpath("/comps/group"):

 72 +             if not group.xpath("packagelist/packagereq"):

 73 +                 group_id = group.xpath("id/text()")[0]

 74 +                 found = False

 75 +                 for pattern in keep_empty:

 76 +                     if fnmatch.fnmatch(group_id, pattern):

 77 +                         found = True

 78 +                         break

 79 +                 if found:

 80 +                     continue

 81 +                 group.getparent().remove(group)

 82 + 

 83 +     def remove_empty_categories(self):

 84 +         """

 85 +         Remove all categories without groups.

 86 +         """

 87 +         for category in self.tree.xpath("/comps/category"):

 88 +             if not category.xpath("grouplist/groupid"):

 89 +                 category.getparent().remove(category)

 90 + 

 91 +     def remove_categories(self):

 92 +         """

 93 +         Remove all categories.

 94 +         """

 95 +         categories = self.tree.xpath("/comps/category")

 96 +         for i in categories:

 97 +             i.getparent().remove(i)

 98 + 

 99 +     def remove_langpacks(self):

100 +         """

101 +         Remove all langpacks.

102 +         """

103 +         langpacks = self.tree.xpath("/comps/langpacks")

104 +         for i in langpacks:

105 +             i.getparent().remove(i)

106 + 

107 +     def remove_translations(self):

108 +         """

109 +         Remove all translations.

110 +         """

111 +         for i in self.tree.xpath("//*[@xml:lang]"):

112 +             i.getparent().remove(i)

113 + 

114 +     def filter_environment_groups(self):

115 +         """

116 +         Remove undefined groups from environments.

117 +         """

118 +         all_groups = self.tree.xpath("/comps/group/id/text()")

119 +         for environment in self.tree.xpath("/comps/environment"):

120 +             for group in environment.xpath("grouplist/groupid"):

121 +                 if group.text not in all_groups:

122 +                     group.getparent().remove(group)

123 + 

124 +     def remove_empty_environments(self):

125 +         """

126 +         Remove all environments without groups.

127 +         """

128 +         for environment in self.tree.xpath("/comps/environment"):

129 +             if not environment.xpath("grouplist/groupid"):

130 +                 environment.getparent().remove(environment)

131 + 

132 +     def remove_environments(self):

133 +         """

134 +         Remove all environments.

135 +         """

136 +         environments = self.tree.xpath("/comps/environment")

137 +         for i in environments:

138 +             i.getparent().remove(i)

139 + 

140 +     def write(self, file_obj):

141 +         self.tree.write(file_obj, pretty_print=self.reindent, xml_declaration=True, encoding=self.encoding)

142 +         file_obj.write("\n")

143 + 

144 +     def pprint(self):

145 +         self.write(sys.stdout)

146 + 

147 +     def xml(self):

148 +         io = StringIO()

149 +         self.write(io)

150 +         io.seek(0)

151 +         return io.read()

152 + 

153 + 

154 + def main():

155 +     parser = optparse.OptionParser("%prog [options] <comps.xml>")

156 +     parser.add_option("--output", help="redirect output to a file")

157 +     parser.add_option("--arch", help="filter groups and packages according to an arch")

158 +     parser.add_option("--arch-only-groups", default=False, action="store_true", help="keep only arch groups, remove the rest")

159 +     parser.add_option("--arch-only-packages", default=False, action="store_true", help="keep only arch packages, remove the rest")

160 +     parser.add_option("--remove-categories", default=False, action="store_true", help="remove all categories")

161 +     parser.add_option("--remove-langpacks", default=False, action="store_true", help="remove the langpacks section")

162 +     parser.add_option("--remove-translations", default=False, action="store_true", help="remove all translations")

163 +     parser.add_option("--remove-environments", default=False, action="store_true", help="remove all environment sections")

164 +     parser.add_option("--keep-empty-group", default=[], action="append", metavar="[GROUPID]", help="keep groups even if they are empty")

165 +     parser.add_option("--no-cleanup", default=False, action="store_true", help="don't remove empty groups and categories")

166 +     parser.add_option("--no-reindent", default=False, action="store_true", help="don't re-indent the output")

167 + 

168 +     opts, args = parser.parse_args()

169 + 

170 +     if len(args) != 1:

171 +         parser.error("please specify exactly one comps file")

172 + 

173 +     comps_file = args[0]

174 + 

175 +     if opts.arch is None:

176 +         parser.error("please specify arch")

177 + 

178 +     file_obj = open(comps_file, "r")

179 +     f = CompsFilter(file_obj, reindent=not opts.no_reindent)

180 +     f.filter_packages(opts.arch, opts.arch_only_packages)

181 +     f.filter_groups(opts.arch, opts.arch_only_groups)

182 + 

183 +     if not opts.no_cleanup:

184 +         f.remove_empty_groups(keep_empty=opts.keep_empty_group)

185 +         f.filter_category_groups()

186 +         f.remove_empty_categories()

187 +         f.filter_environment_groups()

188 +         f.remove_empty_environments()

189 + 

190 +     if opts.remove_categories:

191 +         f.remove_categories()

192 + 

193 +     if opts.remove_langpacks:

194 +         f.remove_langpacks()

195 + 

196 +     if opts.remove_translations:

197 +         f.remove_translations()

198 + 

199 +     if opts.remove_environments:

200 +         f.remove_environments()

201 + 

202 +     if opts.output:

203 +         out = open(opts.output, "w")

204 +         f.write(out)

205 +     else:

206 +         f.pprint()

207 + 

208 + 

209 + if __name__ == "__main__":

210 +     main()
pungi.spec +1 -0
file changed

1 @@ -16,6 +16,7 @@ 

2   Requires:       python-productmd

3   Requires:       python-kickstart

4   Requires:       libselinux-python

5 + Requires:       createrepo_c

6   

7   BuildArch:      noarch

8   
file changed

 1 @@ -27,7 +27,7 @@ 

 2   import shutil

 3   

 4   import kobo.log

 5 - from productmd.composeinfo import Compose

 6 + from productmd.composeinfo import ComposeInfo

 7   from productmd.images import Images

 8   

 9   from pungi.wrappers.variants import VariantsXmlParser

10 @@ -38,16 +38,18 @@

11   

12   

13   def get_compose_dir(topdir, conf, compose_type="production", compose_date=None, compose_respin=None, compose_label=None, already_exists_callbacks=None):

14 -     topdir = os.path.abspath(topdir)

15       already_exists_callbacks = already_exists_callbacks or []

16   

17       # create an incomplete composeinfo to generate compose ID

18 -     ci = Compose()

19 -     ci.product.name = conf["product_name"]

20 -     ci.product.short = conf["product_short"]

21 -     ci.product.version = conf["product_version"]

22 -     ci.product.is_layered = bool(conf.get("product_is_layered", False))

23 -     if ci.product.is_layered:

24 +     ci = ComposeInfo()

25 +     ci.compose.name = conf["product_name"]

26 +     ci.release.name = conf["product_name"]

27 +     ci.compose.short = conf["product_short"]

28 +     ci.release.short = conf["product_short"]

29 +     ci.compose.version = conf["product_version"]

30 +     ci.release.version = conf["product_version"]

31 +     ci.compose.is_layered = bool(conf.get("product_is_layered", False))

32 +     if ci.compose.is_layered:

33           ci.base_product.name = conf["base_product_name"]

34           ci.base_product.short = conf["base_product_short"]

35           ci.base_product.version = conf["base_product_version"]

36 @@ -57,9 +59,6 @@

37       ci.compose.date = compose_date or time.strftime("%Y%m%d", time.localtime())

38       ci.compose.respin = compose_respin or 0

39   

40 -     # HACK - add topdir for callbacks

41 -     ci.topdir = topdir

42 - 

43       while 1:

44           ci.compose.id = ci.create_compose_id()

45   

46 @@ -113,7 +112,7 @@

47           self.paths = Paths(self)

48   

49           # to provide compose_id, compose_date and compose_respin

50 -         self.ci_base = Compose()

51 +         self.ci_base = ComposeInfo()

52           self.ci_base.load(os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json"))

53   

54           self.supported = supported

55 @@ -121,7 +120,7 @@

56               self.log_info("Automatically setting 'supported' flag for a Release Candidate (%s) compose." % self.compose_label)

57               self.supported = True

58   

59 -         self.im = ImageManifest()

60 +         self.im = Images()

61           if self.DEBUG:

62               try:

63                   self.im.load(self.paths.compose.metadata("images.json"))

64 @@ -198,6 +197,9 @@

65           self.variants = VariantsXmlParser(file_obj, tree_arches).parse()

66   

67           # populate ci_base with variants - needed for layered-products (compose_id)

68 +         ####FIXME - compose_to_composeinfo is no longer needed and has been

69 +         ####        removed, but I'm not entirely sure what this is needed for

70 + ####        or if it is needed at all

71           self.ci_base = compose_to_composeinfo(self)

72   

73       def get_variants(self, types=None, arch=None, recursive=False):
file changed

  1 @@ -73,13 +73,13 @@ 

  2       ci.compose.label = compose.compose_label

  3   

  4       # product

  5 -     ci.product.name = compose.conf["product_name"]

  6 -     ci.product.version = compose.conf["product_version"]

  7 -     ci.product.short = compose.conf["product_short"]

  8 -     ci.product.is_layered = compose.conf.get("product_is_layered", False)

  9 +     ci.release.name = compose.conf["product_name"]

 10 +     ci.release.version = compose.conf["product_version"]

 11 +     ci.release.short = compose.conf["product_short"]

 12 +     ci.release.is_layered = compose.conf.get("product_is_layered", False)

 13   

 14       # base product

 15 -     if ci.product.is_layered:

 16 +     if ci.release.is_layered:

 17           ci.base_product.name = compose.conf["base_product_name"]

 18           ci.base_product.version = compose.conf["base_product_version"]

 19           ci.base_product.short = compose.conf["base_product_short"]

 20 @@ -99,38 +99,38 @@

 21           var.arches = set(variant.arches)

 22   

 23           if var.type == "layered-product":

 24 -             var.product.name = variant.product_name

 25 -             var.product.short = variant.product_short

 26 -             var.product.version = variant.product_version

 27 -             var.product.is_layered = True

 28 +             var.release.name = variant.product_name

 29 +             var.release.short = variant.product_short

 30 +             var.release.version = variant.product_version

 31 +             var.release.is_layered = True

 32   

 33           for arch in variant.arches:

 34               # paths: binaries

 35 -             var.os_tree[arch] = relative_path(compose.paths.compose.os_tree(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 36 -             var.repository[arch] = relative_path(compose.paths.compose.repository(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 37 -             var.packages[arch] = relative_path(compose.paths.compose.packages(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 38 +             var.paths.os_tree[arch] = relative_path(compose.paths.compose.os_tree(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 39 +             var.paths.repository[arch] = relative_path(compose.paths.compose.repository(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 40 +             var.paths.packages[arch] = relative_path(compose.paths.compose.packages(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 41               iso_dir = compose.paths.compose.iso_dir(arch=arch, variant=variant, create_dir=False) or ""

 42               if iso_dir and os.path.isdir(os.path.join(compose.paths.compose.topdir(), iso_dir)):

 43 -                 var.isos[arch] = relative_path(iso_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 44 +                 var.paths.isos[arch] = relative_path(iso_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 45               jigdo_dir = compose.paths.compose.jigdo_dir(arch=arch, variant=variant, create_dir=False) or ""

 46               if jigdo_dir and os.path.isdir(os.path.join(compose.paths.compose.topdir(), jigdo_dir)):

 47 -                 var.jigdos[arch] = relative_path(jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 48 +                 var.paths.jigdos[arch] = relative_path(jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 49   

 50               # paths: sources

 51 -             var.source_tree[arch] = relative_path(compose.paths.compose.os_tree(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 52 -             var.source_repository[arch] = relative_path(compose.paths.compose.repository(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 53 -             var.source_packages[arch] = relative_path(compose.paths.compose.packages(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 54 +             var.paths.source_tree[arch] = relative_path(compose.paths.compose.os_tree(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 55 +             var.paths.source_repository[arch] = relative_path(compose.paths.compose.repository(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 56 +             var.paths.source_packages[arch] = relative_path(compose.paths.compose.packages(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 57               source_iso_dir = compose.paths.compose.iso_dir(arch="source", variant=variant, create_dir=False) or ""

 58               if source_iso_dir and os.path.isdir(os.path.join(compose.paths.compose.topdir(), source_iso_dir)):

 59 -                 var.source_isos[arch] = relative_path(source_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 60 +                 var.paths.source_isos[arch] = relative_path(source_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 61               source_jigdo_dir = compose.paths.compose.jigdo_dir(arch="source", variant=variant, create_dir=False) or ""

 62               if source_jigdo_dir and os.path.isdir(os.path.join(compose.paths.compose.topdir(), source_jigdo_dir)):

 63 -                 var.source_jigdos[arch] = relative_path(source_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 64 +                 var.paths.source_jigdos[arch] = relative_path(source_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 65   

 66               # paths: debug

 67 -             var.debug_tree[arch] = relative_path(compose.paths.compose.debug_tree(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 68 -             var.debug_repository[arch] = relative_path(compose.paths.compose.debug_repository(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 69 -             var.debug_packages[arch] = relative_path(compose.paths.compose.debug_packages(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 70 +             var.paths.debug_tree[arch] = relative_path(compose.paths.compose.debug_tree(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 71 +             var.paths.debug_repository[arch] = relative_path(compose.paths.compose.debug_repository(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 72 +             var.paths.debug_packages[arch] = relative_path(compose.paths.compose.debug_packages(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")

 73               '''

 74               # XXX: not supported (yet?)

 75               debug_iso_dir = compose.paths.compose.debug_iso_dir(arch=arch, variant=variant) or ""

 76 @@ -186,10 +186,10 @@

 77   

 78           # product

 79           # TODO: read from variants.xml

 80 -         ti.product.name = variant.product_name

 81 -         ti.product.version = variant.product_version

 82 -         ti.product.short = variant.product_short

 83 -         ti.product.is_layered = True

 84 +         ti.release.name = variant.product_name

 85 +         ti.release.version = variant.product_version

 86 +         ti.release.short = variant.product_short

 87 +         ti.release.is_layered = True

 88   

 89           # base product

 90           ti.base_product.name = compose.conf["product_name"]

 91 @@ -201,13 +201,13 @@

 92           ti.base_product.short = compose.conf["product_short"]

 93       else:

 94           # product

 95 -         ti.product.name = compose.conf["product_name"]

 96 -         ti.product.version = compose.conf["product_version"]

 97 -         ti.product.short = compose.conf["product_short"]

 98 -         ti.product.is_layered = compose.conf.get("product_is_layered", False)

 99 +         ti.release.name = compose.conf["product_name"]

100 +         ti.release.version = compose.conf["product_version"]

101 +         ti.release.short = compose.conf["product_short"]

102 +         ti.release.is_layered = compose.conf.get("product_is_layered", False)

103   

104           # base product

105 -         if ti.product.is_layered:

106 +         if ti.release.is_layered:

107               ti.base_product.name = compose.conf["base_product_name"]

108               ti.base_product.version = compose.conf["base_product_version"]

109               ti.base_product.short = compose.conf["base_product_short"]

110 @@ -236,7 +236,7 @@

111       ti.variants.add(var)

112   

113       repomd_path = os.path.join(var.repository, "repodata", "repomd.xml")

114 -     ti.checksums.add(os_tree, repomd_path)

115 +     ti.checksums.add(repomd_path, "sha256", os_tree)

116   

117       for i in variant.get_variants(types=["addon"], arch=arch):

118           addon = productmd.treeinfo.Variant(ti)

119 @@ -251,17 +251,17 @@

120           var.add(addon)

121   

122           repomd_path = os.path.join(addon.repository, "repodata", "repomd.xml")

123 -         ti.checksums.add(os_tree, repomd_path)

124 +         ti.checksums.add(repomd_path, "sha256", os_tree)

125   

126 -     class LoraxProduct(productmd.treeinfo.product.Product):

127 +     class LoraxProduct(productmd.treeinfo.Release):

128           def _check_short(self):

129               # HACK: set self.short so .treeinfo produced by lorax can be read

130               if not self.short:

131                   self.short = compose.conf["product_short"]

132   

133 -     class LoraxTreeInfo(productmd.TreeInfo):

134 +     class LoraxTreeInfo(productmd.treeinfo.TreeInfo):

135           def clear(self):

136 -             productmd.TreeInfo.clear(self)

137 +             super(LoraxTreeInfo, self).clear()

138               self.product = LoraxProduct(self)

139   

140       # images

141 @@ -277,12 +277,12 @@

142               # stage2 - mainimage

143               if bi_ti.stage2.mainimage:

144                   ti.stage2.mainimage = bi_ti.stage2.mainimage

145 -                 ti.checksums.add(os_tree, ti.stage2.mainimage)

146 +                 ti.checksums.add(ti.stage2.mainimage, "sha256", os_tree)

147   

148               # stage2 - instimage

149               if bi_ti.stage2.instimage:

150                   ti.stage2.instimage = bi_ti.stage2.instimage

151 -                 ti.checksums.add(os_tree, ti.stage2.instimage)

152 +                 ti.checksums.add(ti.stage2.instimage, "sha256", os_tree)

153   

154               # images

155               for platform in bi_ti.images.images:

156 @@ -290,7 +290,7 @@

157                   ti.tree.platforms.add(platform)

158                   for image, path in bi_ti.images.images[platform].items():

159                       ti.images.images[platform][image] = path

160 -                     ti.checksums.add(os_tree, path)

161 +                     ti.checksums.add(path, "sha256", os_tree)

162   

163           # add product.img to images-$arch

164           product_img = os.path.join(os_tree, "images", "product.img")

165 @@ -298,7 +298,7 @@

166           if os.path.isfile(product_img):

167               for platform in ti.images.images:

168                   ti.images.images[platform]["product.img"] = product_img_relpath

169 -                 ti.checksums.add(os_tree, product_img_relpath)

170 +                 ti.checksums.add(product_img_relpath, "sha256", os_tree)

171   

172       path = os.path.join(compose.paths.compose.os_tree(arch=arch, variant=variant), ".treeinfo")

173       compose.log_info("Writing treeinfo: %s" % path)
file changed

 1 @@ -26,7 +26,7 @@ 

 2   

 3   from kobo.threads import ThreadPool, WorkerThread

 4   from kobo.shortcuts import run, read_checksum_file, relative_path

 5 - from productmd.imagemanifest import Image

 6 + from productmd.images import Image

 7   

 8   from pungi.util import get_buildroot_rpms, get_volid

 9   from pungi.wrappers.lorax import LoraxWrapper

10 @@ -299,7 +299,7 @@

11           img.volume_id = iso.get_volume_id(new_boot_iso_path)

12       except RuntimeError:

13           pass

14 -     compose.im.add(arch, variant.uid, img)

15 +     compose.im.add(variant.uid, arch, img)

16       compose.log_info("[DONE ] %s" % msg)

17   

18   
file changed

 1 @@ -23,7 +23,7 @@ 

 2   

 3   import koji

 4   import productmd.treeinfo

 5 - from productmd.imagemanifest import Image

 6 + from productmd.images import Image

 7   from kobo.threads import ThreadPool, WorkerThread

 8   from kobo.shortcuts import run, read_checksum_file, relative_path

 9   

10 @@ -275,7 +275,7 @@

11               img.volume_id = iso.get_volume_id(cmd["iso_path"])

12           except RuntimeError:

13               pass

14 -         compose.im.add(cmd["arch"], cmd["variant"].uid, img)

15 +         compose.im.add(cmd["variant"].uid, cmd["arch"], img)

16           # TODO: supported_iso_bit

17           # add: boot.iso

18   
file changed

 1 @@ -21,7 +21,7 @@ 

 2   import json

 3   

 4   from kobo.rpmlib import parse_nvra

 5 - from productmd import RpmManifest

 6 + from productmd.rpms import Rpms

 7   

 8   from pungi.wrappers.scm import get_file_from_scm

 9   from link import link_files

10 @@ -112,7 +112,7 @@

11           pkg_map = gather_wrapper(self.compose, self.pkgset_phase.package_sets, self.pkgset_phase.path_prefix)

12   

13           manifest_file = self.compose.paths.compose.metadata("rpms.json")

14 -         manifest = RpmManifest()

15 +         manifest = Rpms()

16           manifest.compose.id = self.compose.compose_id

17           manifest.compose.type = self.compose.compose_type

18           manifest.compose.date = self.compose.compose_date
file changed

 1 @@ -62,7 +62,7 @@ 

 2           # update rpm manifest

 3           pkg_obj = pkg_set[pkg["path"]]

 4           nevra = pkg_obj.nevra

 5 -         manifest.add("src", variant.uid, nevra, path=dst_relpath, sigkey=pkg_obj.signature, rpm_type="source")

 6 +         manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="source")

 7   

 8           # update srpm_map

 9           srpm_map.setdefault(pkg_obj.file_name, nevra)

10 @@ -80,7 +80,7 @@

11           pkg_obj = pkg_set[pkg["path"]]

12           nevra = pkg_obj.nevra

13           src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)

14 -         manifest.add(arch, variant.uid, nevra, path=dst_relpath, sigkey=pkg_obj.signature, rpm_type="package", srpm_nevra=src_nevra)

15 +         manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="binary", srpm_nevra=src_nevra)

16   

17       packages_dir = compose.paths.compose.debug_packages(arch, variant)

18       packages_dir_relpath = compose.paths.compose.debug_packages(arch, variant, relative=True)

19 @@ -95,7 +95,7 @@

20           pkg_obj = pkg_set[pkg["path"]]

21           nevra = pkg_obj.nevra

22           src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)

23 -         manifest.add(arch, variant.uid, nevra, path=dst_relpath, sigkey=pkg_obj.signature, rpm_type="debug", srpm_nevra=src_nevra)

24 +         manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="debug", srpm_nevra=src_nevra)

25   

26       pool.start()

27       pool.stop()

 1 @@ -69,7 +69,7 @@ 

 2   

 3   def write_pungi_config(compose, arch, variant, packages, groups, filter_packages, multilib_whitelist, multilib_blacklist, repos=None, comps_repo=None, package_set=None, fulltree_excludes=None, prepopulate=None):

 4       """write pungi config (kickstart) for arch/variant"""

 5 -     pungi = PungiWrapper()

 6 +     pungi_wrapper = PungiWrapper()

 7       pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch)

 8       msg = "Writing pungi config (arch: %s, variant: %s): %s" % (arch, variant, pungi_cfg)

 9   

10 @@ -101,17 +101,17 @@

11           else:

12               filter_packages_str.append(pkg_name)

13   

14 -     pungi.write_kickstart(ks_path=pungi_cfg, repos=repos, groups=groups, packages=packages_str, exclude_packages=filter_packages_str, comps_repo=comps_repo, lookaside_repos=lookaside_repos, fulltree_excludes=fulltree_excludes, multilib_whitelist=multilib_whitelist, multilib_blacklist=multilib_blacklist, prepopulate=prepopulate)

15 +     pungi_wrapper.write_kickstart(ks_path=pungi_cfg, repos=repos, groups=groups, packages=packages_str, exclude_packages=filter_packages_str, comps_repo=comps_repo, lookaside_repos=lookaside_repos, fulltree_excludes=fulltree_excludes, multilib_whitelist=multilib_whitelist, multilib_blacklist=multilib_blacklist, prepopulate=prepopulate)

16   

17   

18   def resolve_deps(compose, arch, variant):

19 -     pungi = PungiWrapper()

20 +     pungi_wrapper = PungiWrapper()

21       pungi_log = compose.paths.work.pungi_log(arch, variant)

22   

23       msg = "Running pungi (arch: %s, variant: %s)" % (arch, variant)

24       if compose.DEBUG and os.path.exists(pungi_log):

25           compose.log_warning("[SKIP ] %s" % msg)

26 -         return pungi.get_packages(open(pungi_log, "r").read())

27 +         return pungi_wrapper.get_packages(open(pungi_log, "r").read())

28   

29       compose.log_info("[BEGIN] %s" % msg)

30       pungi_conf = compose.paths.work.pungi_conf(arch, variant)

31 @@ -146,7 +146,7 @@

32       yum_arch = tree_arch_to_yum_arch(arch)

33       tmp_dir = compose.paths.work.tmp_dir(arch, variant)

34       cache_dir = compose.paths.work.pungi_cache_dir(arch, variant)

35 -     cmd = pungi.get_pungi_cmd(pungi_conf, destdir=tmp_dir, name=variant.uid, selfhosting=selfhosting, fulltree=fulltree, arch=yum_arch, full_archlist=True, greedy=greedy_method, cache_dir=cache_dir, lookaside_repos=lookaside_repos, multilib_methods=multilib_methods)

36 +     cmd = pungi_wrapper.get_pungi_cmd(pungi_conf, destdir=tmp_dir, name=variant.uid, selfhosting=selfhosting, fulltree=fulltree, arch=yum_arch, full_archlist=True, greedy=greedy_method, cache_dir=cache_dir, lookaside_repos=lookaside_repos, multilib_methods=multilib_methods)

37       # Use temp working directory as workaround for

38       # https://bugzilla.redhat.com/show_bug.cgi?id=795137

39       tmp_dir = tempfile.mkdtemp(prefix="pungi_")

40 @@ -154,7 +154,7 @@

41           run(cmd, logfile=pungi_log, show_cmd=True, workdir=tmp_dir)

42       finally:

43           rmtree(tmp_dir)

44 -     result = pungi.get_packages(open(pungi_log, "r").read())

45 +     result = pungi_wrapper.get_packages(open(pungi_log, "r").read())

46   

47       compose.log_info("[DONE ] %s" % msg)

48       return result

49 @@ -165,9 +165,9 @@

50       if not check_deps:

51           return

52   

53 -     pungi = PungiWrapper()

54 +     pungi_wrapper = PungiWrapper()

55       pungi_log = compose.paths.work.pungi_log(arch, variant)

56 -     missing_deps = pungi.get_missing_deps(open(pungi_log, "r").read())

57 +     missing_deps = pungi_wrapper.get_missing_deps(open(pungi_log, "r").read())

58       if missing_deps:

59           for pkg in sorted(missing_deps):

60               compose.log_error("Unresolved dependencies in package %s: %s" % (pkg, sorted(missing_deps[pkg])))

 1 @@ -150,7 +150,7 @@ 

 2   

 3   def write_pungi_config(compose, arch, variant, repos=None, comps_repo=None, package_set=None):

 4       """write pungi config (kickstart) for arch/variant"""

 5 -     pungi = PungiWrapper()

 6 +     pungi_wrapper = PungiWrapper()

 7       pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch)

 8       msg = "Writing pungi config (arch: %s, variant: %s): %s" % (arch, variant, pungi_cfg)

 9   

10 @@ -182,4 +182,4 @@

11           packages.append("system-release")

12   

13       prepopulate = get_prepopulate_packages(compose, arch, None)

14 -     pungi.write_kickstart(ks_path=pungi_cfg, repos=repos, groups=grps, packages=packages, exclude_packages=[], comps_repo=None, prepopulate=prepopulate)

15 +     pungi_wrapper.write_kickstart(ks_path=pungi_cfg, repos=repos, groups=grps, packages=packages, exclude_packages=[], comps_repo=None, prepopulate=prepopulate)

1 @@ -36,6 +36,7 @@ 

2       scripts         = [

3           'bin/pungi',

4           'bin/pungi-koji',

5 +         'bin/comps_filter',

6       ],

7       data_files      = [

8           ('/usr/share/pungi', glob.glob('share/*.xsl')),
Changes summary
+1 -0
file changed
+15 -13
file changed
+40 -40
file changed
+2 -2
file changed
+2 -2
file changed
+2 -2
file changed
+3 -3
file changed