#18 rework configuration
Merged 8 years ago by ausil. Opened 8 years ago by dmach.
dmach/pungi config-rework into master

file modified
+1
@@ -1,6 +1,7 @@ 

  include AUTHORS

  include COPYING

  include GPL

+ include RELEASE-NOTES

  include pungi.spec

  include share/*

  include share/multilib/*

file added
+22
@@ -0,0 +1,22 @@ 

+ DATE:   2015-08-09

+ CHANGE: Rename product_* config options to release_* for consistency with productmd.

+ ACTION: Rename product_name, product_short, product_version, product_is_layered to release_* in config files.

+         Rename //variant/product to //variant/release in variants XML.

+ 

+ DATE:   2015-08-09

+ CHANGE: pkgset_koji_url and pkgset_koji_path_prefix config options replaced with koji_profile.

+ ACTION: Add 'koji_profile = "<profile_name>"' (use "koji" for Fedora) to config files.

+         You can safely remove pkgset_koji_url and pkgset_koji_path_prefix from config files.

+ 

+ DATE:   2015-08-08

+ CHANGE: create_optional_isos config option now defaults to False.

+ ACTION: You can safely remove 'create_optional_isos = False' from config files.

+ 

+ DATE:   2015-08-08

+ CHANGE: createrepo_c config option now defaults to True.

+ ACTION: You can safely remove 'createrepo_c = True' from config files.

+         Set 'createrepo_c = False' if you need legacy createrepo.

+ 

+ DATE:   2015-08-08

+ CHANGE: createrepo_checksum config option is now mandatory.

+ ACTION: Add 'createrepo_checksum = "sha256"' (or "sha") to config files.
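
Taken together, these notes amount to a mechanical config migration. A hedged sketch of an updated config fragment (values follow the Fedora examples used elsewhere in this patch; adjust them to your own release):

    release_name = "Fedora"            # was: product_name
    release_short = "Fedora"           # was: product_short
    release_version = "23"             # was: product_version
    release_is_layered = False         # was: product_is_layered
    koji_profile = "koji"              # replaces pkgset_koji_url and pkgset_koji_path_prefix
    createrepo_checksum = "sha256"     # now mandatory; "sha256" or "sha"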

file modified
+2 -2
@@ -302,8 +302,8 @@ 

  

      # create a latest symlink

      compose_dir = os.path.basename(compose.topdir)

-     symlink_name = "latest-%s-%s" % (compose.conf["product_short"], ".".join(compose.conf["product_version"].split(".")[:-1]))

-     if compose.conf["product_is_layered"]:

+     symlink_name = "latest-%s-%s" % (compose.conf["release_short"], ".".join(compose.conf["release_version"].split(".")[:-1]))

+     if compose.conf["release_is_layered"]:

          symlink_name += "-%s-%s" % (compose.conf["base_product_short"], compose.conf["base_product_version"])

      symlink = os.path.join(compose.topdir, "..", symlink_name)

  

file added
+401
@@ -0,0 +1,401 @@ 

+ ===============

+  Configuration

+ ===============

+ 

+ Please read

+ `productmd documentation <http://release-engineering.github.io/productmd/index.html>`_

+ for

+ `terminology <http://release-engineering.github.io/productmd/terminology.html>`_

+ and other release- and compose-related details.

+ 

+ 

+ Minimal Config Example

+ ======================

+ ::

+ 

+     # RELEASE

+     release_name = "Fedora"

+     release_short = "Fedora"

+     release_version = "23"

+ 

+     # GENERAL SETTINGS

+     comps_file = "comps-f23.xml"

+     variants_file = "variants-f23.xml"

+ 

+     # KOJI

+     koji_profile = "koji"

+     runroot = False

+ 

+     # PKGSET

+     sigkeys = [None]

+     pkgset_source = "koji"

+     pkgset_koji_tag = "f23"

+ 

+     # CREATEREPO

+     createrepo_checksum = "sha256"

+ 

+     # GATHER

+     gather_source = "comps"

+     gather_method = "deps"

+     greedy_method = "build"

+     multilib_methods = []

+     check_deps = False

+ 

+     # BUILDINSTALL

+     bootable = True

+     buildinstall_method = "lorax"

+     buildinstall_upgrade_image = True

+ 

+ 

+ Release

+ =======

+ The following options describe a release.

+ 

+ 

+ Options

+ -------

+ 

+ **release_name** [mandatory]

+     (*str*) -- release name

+ 

+ **release_short** [mandatory]

+     (*str*) -- release short name, without spaces and special characters

+ 

+ **release_version** [mandatory]

+     (*str*) -- release version

+ 

+ **release_type** = "ga"

+     (*str*) -- release type, "ga" or "updates"

+ 

+ **release_is_layered** = False

+     (*bool*) -- typically False for an operating system, True otherwise

+ 

+ 

+ Example

+ -------

+ ::

+ 

+     release_name = "Fedora"

+     release_short = "Fedora"

+     release_version = "23"

+     # release_type = "ga"

+ 

+ 

+ Base Product

+ ============

+ Base product options are **optional** and we need

+ them only if we're composing a layered product

+ built on another (base) product.

+ 

+ 

+ Options

+ -------

+ 

+ **base_product_name**

+     (*str*) -- base product name

+ 

+ **base_product_short**

+     (*str*) -- base product short name, without spaces and special characters

+ 

+ **base_product_version**

+     (*str*) -- base product **major** version

+ 

+ 

+ Example

+ -------

+ ::

+ 

+     release_name = "RPM Fusion"

+     release_short = "rf"

+     release_version = "23.0"

+ 

+     release_is_layered = True

+ 

+     base_product_name = "Fedora"

+     base_product_short = "Fedora"

+     base_product_version = "23"

+ 

+ 

+ General Settings

+ ================

+ 

+ Options

+ -------

+ 

+ **comps_file** [mandatory]

+     (*scm_dict*, *str* or None) -- reference to comps XML file with installation groups

+ 

+ **variants_file** [mandatory]

+     (*scm_dict* or *str*) -- reference to variants XML file that defines release variants and architectures

+ 

+ 

+ Example

+ -------

+ ::

+ 

+     comps_file = {

+         "scm": "git",

+         "repo": "https://git.fedorahosted.org/git/comps.git",

+         "branch": None,

+         "file": "comps-f23.xml.in",

+     }

+ 

+     variants_file = {

+         "scm": "git",

+         "repo": "https://pagure.io/pungi-fedora.git",

+         "branch": None,

+         "file": "variants-fedora.xml",

+     }

+ 

+ 

+ 

+ 

+ Createrepo Settings

+ ===================

+ 

+ 

+ Options

+ -------

+ 

+ **createrepo_checksum** [mandatory]

+     (*str*) -- specify checksum type for createrepo; expected values: sha256, sha

+ 

+ **createrepo_c** = True

+     (*bool*) -- use createrepo_c (True) or legacy createrepo (False)

+ 

+ 

+ 

+ Example

+ -------

+ ::

+ 

+     createrepo_checksum = "sha256"

+ 

+ 

+ Package Set Settings

+ ====================

+ 

+ 

+ Options

+ -------

+ 

+ **sigkeys**

+     ([*str* or None]) -- priority list of sigkeys, *None* means unsigned

+ 

+ **pkgset_source** [mandatory]

+     (*str*) -- "koji" (any koji instance) or "repos" (arbitrary yum repositories)

+ 

+ **pkgset_koji_tag** [mandatory]

+     (*str*) -- tag to read package set from

+ 

+ **pkgset_koji_inherit** = True

+     (*bool*) -- inherit builds from parent tags; we can turn it off only if we have all builds tagged in a single tag

+ 

+ 

+ Example

+ -------

+ ::

+ 

+     sigkeys = [None]

+     pkgset_source = "koji"

+     pkgset_koji_tag = "f23"

+ 

+ 

+ Buildinstall Settings

+ =====================

+ The script or process that creates bootable images with

+ the Anaconda installer is historically called

+ `buildinstall <https://git.fedorahosted.org/cgit/anaconda.git/tree/scripts/buildinstall?h=f15-branch>`_.

+ 

+ Options:

+ 

+ * bootable (*bool*) -- produce bootable media (run the buildinstall phase)

+ * buildinstall_method (*str*) -- "lorax" (f16+, rhel7+) or "buildinstall" (older releases)

+ * buildinstall_upgrade_image (*bool*) -- build upgrade images; applies only to the "lorax" buildinstall method

+ 

+ Example::

+ 

+     bootable = True

+     buildinstall_method = "lorax"

+ 

+ 

+ .. note::

+ 

+     It is advised to run buildinstall (lorax) in koji,

+     i.e. with **runroot enabled** for clean build environments, better logging, etc.

+ 

+ 

+ .. warning::

+ 

+     Lorax installs RPMs into a chroot. This involves running %post scriptlets,

+     which frequently execute binaries inside the chroot.

+     For this reason, we **must** use runroot when composing for multiple architectures.

+ 

+ 

+ Gather Settings

+ ===============

+ 

+ Options

+ -------

+ 

+ **gather_source** [mandatory]

+     (*str*) -- from where to read initial package list; expected values: "comps", "none"

+ 

+ **gather_method** [mandatory]

+     (*str*) -- "deps", "nodeps"

+ 

+ **greedy_method**

+     (*str*) -- see :doc:`gather`, recommended value: "build"

+ 

+ **multilib_methods** = []

+     ([*str*]) -- see :doc:`gather`, recommended value: ["devel", "runtime"]

+ 

+ **multilib_arches**

+     ([*str*] or None) -- list of compose architectures entitled for multilib; set to None to apply multilib on all compose arches

+ 

+ **additional_packages**

+     (*list*) -- additional packages to be included in a variant and architecture; format: [(variant_uid_regex, {arch|*: [package_globs]})]

+ 

+ **filter_packages**

+     (*list*) -- packages to be excluded from a variant and architecture; format: [(variant_uid_regex, {arch|*: [package_globs]})]

+ 

+ **multilib_blacklist**

+     (*dict*) -- multilib blacklist; format: {arch|*: [package_globs]}

+ 

+ **multilib_whitelist**

+     (*dict*) -- multilib whitelist; format: {arch|*: [package_globs]}

+ 

+ **gather_lookaside_repos** = []

+     (*list*) -- lookaside repositories used for package gathering; format: [(variant_uid_regex, {arch|*: [repo_urls]})]

+ 

+ 

+ Example

+ -------

+ ::

+ 

+     gather_source = "comps"

+     gather_method = "deps"

+     greedy_method = "build"

+     multilib_methods = ["devel", "runtime"]

+     multilib_arches = ["ppc64", "s390x", "x86_64"]

+     check_deps = False

+ 

+     additional_packages = [

+         # bz#123456

+         ('^(Workstation|Server)$', {

+             '*': [

+                 'grub2',

+                 'kernel',

+             ],

+         }),

+     ]

+ 

+     filter_packages = [

+         # bz#111222

+         ('^.*$', {

+             '*': [

+                 'kernel-doc',

+             ],

+         }),

+     ]

+ 

+     multilib_blacklist = {

+         "*": [

+             "gcc",

+         ],

+     }

+ 

+     multilib_whitelist = {

+         "*": [

+             "alsa-plugins-*",

+         ],

+     }

+ 

+     # gather_lookaside_repos = [

+     #     ('^.*$', {

+     #         'x86_64': [

+     #             "https://dl.fedoraproject.org/pub/fedora/linux/releases/22/Everything/x86_64/os/",

+     #             "https://dl.fedoraproject.org/pub/fedora/linux/releases/22/Everything/source/SRPMS/",

+     #         ]

+     #     }),

+     # ]

+ 

+ 

+ .. note::

+ 

+    It is a good practice to attach bug/ticket numbers

+    to additional_packages, filter_packages, multilib_blacklist and multilib_whitelist

+    to track decisions.

+ 

+ 

+ Koji Settings

+ =============

+ 

+ 

+ Options

+ -------

+ 

+ **koji_profile**

+     (*str*) -- koji profile name

+ 

+ **runroot** [mandatory]

+     (*bool*) -- run some tasks such as buildinstall or createiso in koji build root (True) or locally (False)

+ 

+ **runroot_channel**

+     (*str*) -- name of koji channel

+ 

+ **runroot_tag**

+     (*str*) -- name of koji **build** tag used for runroot

+ 

+ 

+ Example

+ -------

+ ::

+ 

+     koji_profile = "koji"

+     runroot = True

+     runroot_channel = "runroot"

+     runroot_tag = "f23-build"

+ 

+ 

+ Extra Files Settings

+ ====================

+ 

+ 

+ Options

+ -------

+ 

+ **extra_files**

+     (*list*) -- references to external files to be placed in os/ directory and media; format: [(variant_uid_regex, {arch|*: [scm_dicts]})]

+ 

+ 

+ Example

+ -------

+ ::

+ 

+     extra_files = [

+         ('^.*$', {

+             '*': [

+                 # GPG keys

+                 {

+                     "scm": "rpm",

+                     "repo": "fedora-repos",

+                     "branch": None,

+                     "file": [

+                         "/etc/pki/rpm-gpg/RPM-GPG-KEY-22-fedora",

+                     ],

+                     "target": "",

+                 },

+                 # GPL

+                 {

+                     "scm": "git",

+                     "repo": "https://pagure.io/pungi-fedora",

+                     "branch": None,

+                     "file": [

+                         "GPL",

+                     ],

+                     "target": "",

+                 },

+             ],

+         }),

+     ]

file modified
+1
@@ -14,6 +14,7 @@ 

      about

      contributing

      testing

+     configuration

  

  

  Indices and tables

file modified
+5 -8
@@ -42,14 +42,11 @@ 

  

      # create an incomplete composeinfo to generate compose ID

      ci = ComposeInfo()

-     ci.compose.name = conf["product_name"]

-     ci.release.name = conf["product_name"]

-     ci.compose.short = conf["product_short"]

-     ci.release.short = conf["product_short"]

-     ci.compose.version = conf["product_version"]

-     ci.release.version = conf["product_version"]

-     ci.compose.is_layered = bool(conf.get("product_is_layered", False))

-     if ci.compose.is_layered:

+     ci.release.name = conf["release_name"]

+     ci.release.short = conf["release_short"]

+     ci.release.version = conf["release_version"]

+     ci.release.is_layered = bool(conf.get("release_is_layered", False))

+     if ci.release.is_layered:

          ci.base_product.name = conf["base_product_name"]

          ci.base_product.short = conf["base_product_short"]

          ci.base_product.version = conf["base_product_version"]

file modified
+27 -27
@@ -28,14 +28,14 @@ 

  

  

  def get_description(compose, variant, arch):

-     if "product_discinfo_description" in compose.conf:

-         result = compose.conf["product_discinfo_description"]

+     if "release_discinfo_description" in compose.conf:

+         result = compose.conf["release_discinfo_description"]

      elif variant.type == "layered-product":

          # we need to make sure the layered product behaves as it was composed separately

-         result = "%s %s for %s %s" % (variant.product_name, variant.product_version, compose.conf["product_name"], get_major_version(compose.conf["product_version"]))

+         result = "%s %s for %s %s" % (variant.release_name, variant.release_version, compose.conf["release_name"], get_major_version(compose.conf["release_version"]))

      else:

-         result = "%s %s" % (compose.conf["product_name"], compose.conf["product_version"])

-         if compose.conf.get("product_is_layered", False):

+         result = "%s %s" % (compose.conf["release_name"], compose.conf["release_version"])

+         if compose.conf.get("release_is_layered", False):

              result += "for %s %s" % (compose.conf["base_product_name"], compose.conf["base_product_version"])

  

      result = result % {"variant_name": variant.name, "arch": arch}
@@ -73,10 +73,10 @@ 

      ci.compose.label = compose.compose_label

  

      # product

-     ci.release.name = compose.conf["product_name"]

-     ci.release.version = compose.conf["product_version"]

-     ci.release.short = compose.conf["product_short"]

-     ci.release.is_layered = compose.conf.get("product_is_layered", False)

+     ci.release.name = compose.conf["release_name"]

+     ci.release.version = compose.conf["release_version"]

+     ci.release.short = compose.conf["release_short"]

+     ci.release.is_layered = compose.conf.get("release_is_layered", False)

  

      # base product

      if ci.release.is_layered:
@@ -99,9 +99,9 @@ 

          var.arches = set(variant.arches)

  

          if var.type == "layered-product":

-             var.release.name = variant.product_name

-             var.release.short = variant.product_short

-             var.release.version = variant.product_version

+             var.release.name = variant.release_name

+             var.release.short = variant.release_short

+             var.release.version = variant.release_version

              var.release.is_layered = True

  

          for arch in variant.arches:
@@ -184,27 +184,27 @@ 

      if variant.type == "layered-product":

          # we need to make sure the layered product behaves as it was composed separately

  

-         # product

+         # release

          # TODO: read from variants.xml

-         ti.release.name = variant.product_name

-         ti.release.version = variant.product_version

-         ti.release.short = variant.product_short

+         ti.release.name = variant.release_name

+         ti.release.version = variant.release_version

+         ti.release.short = variant.release_short

          ti.release.is_layered = True

  

          # base product

-         ti.base_product.name = compose.conf["product_name"]

-         if "." in compose.conf["product_version"]:

+         ti.base_product.name = compose.conf["release_name"]

+         if "." in compose.conf["release_version"]:

              # remove minor version if present

-             ti.base_product.version = get_major_version(compose.conf["product_version"])

+             ti.base_product.version = get_major_version(compose.conf["release_version"])

          else:

-             ti.base_product.version = compose.conf["product_version"]

-         ti.base_product.short = compose.conf["product_short"]

+             ti.base_product.version = compose.conf["release_version"]

+         ti.base_product.short = compose.conf["release_short"]

      else:

-         # product

-         ti.release.name = compose.conf["product_name"]

-         ti.release.version = compose.conf["product_version"]

-         ti.release.short = compose.conf["product_short"]

-         ti.release.is_layered = compose.conf.get("product_is_layered", False)

+         # release

+         ti.release.name = compose.conf["release_name"]

+         ti.release.version = compose.conf["release_version"]

+         ti.release.short = compose.conf["release_short"]

+         ti.release.is_layered = compose.conf.get("release_is_layered", False)

  

          # base product

          if ti.release.is_layered:
@@ -257,7 +257,7 @@ 

          def _check_short(self):

              # HACK: set self.short so .treeinfo produced by lorax can be read

              if not self.short:

-                 self.short = compose.conf["product_short"]

+                 self.short = compose.conf["release_short"]

  

      class LoraxTreeInfo(productmd.treeinfo.TreeInfo):

          def clear(self):

file modified
+2 -2
@@ -436,7 +436,7 @@ 

          if variant.type == "addon":

              return None

          if variant.type == "optional":

-             if not self.compose.conf["create_optional_isos"]:

+             if not self.compose.conf.get("create_optional_isos", False):

                  return None

          if arch == "src":

              arch = "source"
@@ -502,7 +502,7 @@ 

          if variant.type == "addon":

              return None

          if variant.type == "optional":

-             if not self.compose.conf["create_optional_isos"]:

+             if not self.compose.conf.get("create_optional_isos", False):

                  return None

          if arch == "src":

              arch = "source"

file modified
+3 -3
@@ -80,9 +80,9 @@ 

  

      def run(self):

          lorax = LoraxWrapper()

-         product = self.compose.conf["product_name"]

-         version = self.compose.conf["product_version"]

-         release = self.compose.conf["product_version"]

+         product = self.compose.conf["release_name"]

+         version = self.compose.conf["release_version"]

+         release = self.compose.conf["release_version"]

          noupgrade = not self.compose.conf.get("buildinstall_upgrade_image", False)

          buildinstall_method = self.compose.conf["buildinstall_method"]

  

file modified
+2 -2
@@ -375,8 +375,8 @@ 

              del ti.checksums.checksums["repodata/repomd.xml"]

  

          # rebuild repodata

-         createrepo_c = compose.conf.get("createrepo_c", False)

-         createrepo_checksum = compose.conf.get("createrepo_checksum", None)

+         createrepo_c = compose.conf.get("createrepo_c", True)

+         createrepo_checksum = compose.conf["createrepo_checksum"]

          repo = CreaterepoWrapper(createrepo_c=createrepo_c)

  

          file_list = "%s-file-list" % iso_dir

file modified
+3 -3
@@ -50,7 +50,7 @@ 

          {

              "name": "createrepo_checksum",

              "expected_types": [bool],

-             "optional": True,

+             "expected_values": ["sha256", "sha"],

          },

          {

              "name": "product_id",
@@ -85,8 +85,8 @@ 

  

  

  def create_variant_repo(compose, arch, variant, pkg_type):

-     createrepo_c = compose.conf.get("createrepo_c", False)

-     createrepo_checksum = compose.conf.get("createrepo_checksum", None)

+     createrepo_c = compose.conf.get("createrepo_c", True)

+     createrepo_checksum = compose.conf["createrepo_checksum"]

      repo = CreaterepoWrapper(createrepo_c=createrepo_c)

      if pkg_type == "srpm":

          repo_dir_arch = compose.paths.work.arch_repo(arch="global")

@@ -105,7 +105,7 @@ 

  

      def check_config(self):

          errors = []

-         for i in ["product_name", "product_short", "product_version"]:

+         for i in ["release_name", "release_short", "release_version"]:

              errors.append(self.conf_assert_str(i))

  

      def run(self):

@@ -151,7 +151,7 @@ 

      # https://bugzilla.redhat.com/show_bug.cgi?id=795137

      tmp_dir = tempfile.mkdtemp(prefix="pungi_")

      try:

-         run(cmd, logfile=pungi_log, show_cmd=True, workdir=tmp_dir)

+         run(cmd, logfile=pungi_log, show_cmd=True, workdir=tmp_dir, env=os.environ)

      finally:

          rmtree(tmp_dir)

      result = pungi_wrapper.get_packages(open(pungi_log, "r").read())

file modified
+8 -9
@@ -35,25 +35,25 @@ 

      config_options = (

          # PRODUCT INFO

          {

-             "name": "product_name",

+             "name": "release_name",

              "expected_types": [str],

          },

          {

-             "name": "product_short",

+             "name": "release_short",

              "expected_types": [str],

          },

          {

-             "name": "product_version",

+             "name": "release_version",

              "expected_types": [str],

          },

          {

              # override description in .discinfo; accepts %(variant_name)s and %(arch)s variables

-             "name": "product_discinfo_description",

+             "name": "release_discinfo_description",

              "expected_types": [str],

              "optional": True,

          },

          {

-             "name": "product_is_layered",

+             "name": "release_is_layered",

              "expected_types": [bool],

              "requires": (

                  (lambda x: bool(x), ["base_product_name", "base_product_short", "base_product_version"]),
@@ -125,7 +125,6 @@ 

              "name": "createrepo_checksum",

              "expected_types": [str],

              "expected_values": ["sha256", "sha"],

-             "optional": True,

          },

  

          # RUNROOT SETTINGS
@@ -133,7 +132,7 @@ 

              "name": "runroot",

              "expected_types": [bool],

              "requires": (

-                 (lambda x: bool(x), ["runroot_tag", "runroot_channel"]),

+                 (lambda x: bool(x), ["koji_profile", "runroot_tag", "runroot_channel"]),

              ),

              "conflicts": (

                  (lambda x: not bool(x), ["runroot_tag", "runroot_channel"]),
@@ -252,8 +251,8 @@ 

      if not compose.has_comps:

          return

  

-     createrepo_c = compose.conf.get("createrepo_c", False)

-     createrepo_checksum = compose.conf.get("createrepo_checksum", None)

+     createrepo_c = compose.conf.get("createrepo_c", True)

+     createrepo_checksum = compose.conf["createrepo_checksum"]

      repo = CreaterepoWrapper(createrepo_c=createrepo_c)

      comps_repo = compose.paths.work.comps_repo(arch=arch)

      comps_path = compose.paths.work.comps(arch=arch)

@@ -39,8 +39,8 @@ 

  

  

  def create_global_repo(compose, path_prefix):

-     createrepo_c = compose.conf.get("createrepo_c", False)

-     createrepo_checksum = compose.conf.get("createrepo_checksum", None)

+     createrepo_c = compose.conf.get("createrepo_c", True)

+     createrepo_checksum = compose.conf["createrepo_checksum"]

      repo = CreaterepoWrapper(createrepo_c=createrepo_c)

      repo_dir_global = compose.paths.work.arch_repo(arch="global")

      msg = "Running createrepo for the global package set"
@@ -55,7 +55,7 @@ 

      old_compose_path = None

      update_md_path = None

      if compose.old_composes:

-         old_compose_path = find_old_compose(compose.old_composes, compose.conf["product_short"], compose.conf["product_version"], compose.conf.get("base_product_short", None), compose.conf.get("base_product_version", None))

+         old_compose_path = find_old_compose(compose.old_composes, compose.conf["release_short"], compose.conf["release_version"], compose.conf.get("base_product_short", None), compose.conf.get("base_product_version", None))

          if old_compose_path is None:

              compose.log_info("No suitable old compose found in: %s" % compose.old_composes)

          else:
@@ -73,8 +73,8 @@ 

  

  

  def create_arch_repos(compose, arch, path_prefix):

-     createrepo_c = compose.conf.get("createrepo_c", False)

-     createrepo_checksum = compose.conf.get("createrepo_checksum", None)

+     createrepo_c = compose.conf.get("createrepo_c", True)

+     createrepo_checksum = compose.conf["createrepo_checksum"]

      repo = CreaterepoWrapper(createrepo_c=createrepo_c)

      repo_dir_global = compose.paths.work.arch_repo(arch="global")

      repo_dir = compose.paths.work.arch_repo(arch=arch)
@@ -93,7 +93,7 @@ 

      compose.log_info("[DONE ] %s" % msg)

  

  

- def find_old_compose(old_compose_dirs, product_short, product_version, base_product_short=None, base_product_version=None):

+ def find_old_compose(old_compose_dirs, release_short, release_version, base_product_short=None, base_product_version=None):

      composes = []

  

      for compose_dir in force_list(old_compose_dirs):
@@ -104,7 +104,7 @@ 

          for i in os.listdir(compose_dir):

              # TODO: read .composeinfo

  

-             pattern = "%s-%s" % (product_short, product_version)

+             pattern = "%s-%s" % (release_short, release_version)

              if base_product_short:

                  pattern += "-%s" % base_product_short

              if base_product_version:

file modified
+12 -15
@@ -29,6 +29,7 @@ 

  

  from kobo.threads import WorkerThread, ThreadPool

  

+ import pungi.wrappers.kojiwrapper

  from pungi.util import pkg_is_srpm

  from pungi.arch import get_valid_arches

  
@@ -195,31 +196,27 @@ 

  

  

  class KojiPackageSet(PackageSetBase):

-     def __init__(self, koji_proxy, sigkey_ordering, arches=None, logger=None):

+     def __init__(self, koji_wrapper, sigkey_ordering, arches=None, logger=None):

          PackageSetBase.__init__(self, sigkey_ordering=sigkey_ordering, arches=arches, logger=logger)

-         self.koji_proxy = koji_proxy

-         self.koji_pathinfo = getattr(__import__(koji_proxy.__module__, {}, {}, []), "pathinfo")

+         self.koji_wrapper = koji_wrapper

  

      def __getstate__(self):

          result = self.__dict__.copy()

-         result["koji_class"] = self.koji_proxy.__class__.__name__

-         result["koji_module"] = self.koji_proxy.__class__.__module__

-         result["koji_baseurl"] = self.koji_proxy.baseurl

-         result["koji_opts"] = self.koji_proxy.opts

-         del result["koji_proxy"]

-         del result["koji_pathinfo"]

+         result["koji_profile"] = self.koji_wrapper.koji_module.config.profile

+         del result["koji_wrapper"]

          del result["_logger"]

          return result

  

      def __setstate__(self, data):

-         class_name = data.pop("koji_class")

-         module_name = data.pop("koji_module")

-         module = __import__(module_name, {}, {}, [class_name])

-         cls = getattr(module, class_name)

-         self.koji_proxy = cls(data.pop("koji_baseurl"), data.pop("koji_opts"))

+         koji_profile = data.pop("koji_profile")

+         self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(koji_profile)

          self._logger = None

          self.__dict__.update(data)

  

+     @property

+     def koji_proxy(self):

+         return self.koji_wrapper.koji_proxy

+ 

      def get_latest_rpms(self, tag, event, inherit=True):

          return self.koji_proxy.listTaggedRPMS(tag, event=event, inherit=inherit, latest=True)

  
@@ -227,7 +224,7 @@ 

          rpm_info, build_info = queue_item

          rpm_path = None

          found = False

-         pathinfo = self.koji_pathinfo

+         pathinfo = self.koji_wrapper.koji_module.pathinfo

          for sigkey in self.sigkey_ordering:

              if sigkey is None:

                  # we're looking for *signed* copies here

@@ -21,6 +21,7 @@ 

  

  import koji

  

+ import pungi.wrappers.kojiwrapper

  import pungi.phases.pkgset.pkgsets

  from pungi.arch import get_valid_arches

  
@@ -34,13 +35,13 @@ 

      enabled = True

      config_options = (

          {

-             "name": "pkgset_source",

+             "name": "koji_profile",

              "expected_types": [str],

-             "expected_values": "koji",

          },

          {

-             "name": "pkgset_koji_url",

+             "name": "pkgset_source",

              "expected_types": [str],

+             "expected_values": "koji",

          },

          {

              "name": "pkgset_koji_tag",
@@ -51,48 +52,23 @@ 

              "expected_types": [bool],

              "optional": True,

          },

-         {

-             "name": "pkgset_koji_path_prefix",

-             "expected_types": [str],

-         },

      )

  

      def __call__(self):

          compose = self.compose

-         koji_url = compose.conf["pkgset_koji_url"]

-         # koji_tag = compose.conf["pkgset_koji_tag"]

-         path_prefix = compose.conf["pkgset_koji_path_prefix"].rstrip("/") + "/"  # must contain trailing '/'

- 

-         koji_proxy = koji.ClientSession(koji_url)

-         package_sets = get_pkgset_from_koji(self.compose, koji_proxy, path_prefix)

+         koji_profile = compose.conf["koji_profile"]

+         self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(koji_profile)

+         path_prefix = self.koji_wrapper.koji_module.config.topdir.rstrip("/") + "/"  # must contain trailing '/'

+         package_sets = get_pkgset_from_koji(self.compose, self.koji_wrapper, path_prefix)

          return (package_sets, path_prefix)

  

  

- '''

- class PkgsetKojiPhase(PhaseBase):

-     """PKGSET"""

-     name = "pkgset"

- 

-     def __init__(self, compose):

-         PhaseBase.__init__(self, compose)

-         self.package_sets = None

-         self.path_prefix = None

- 

-     def run(self):

-         path_prefix = self.compose.conf["koji_path_prefix"]

-         path_prefix = path_prefix.rstrip("/") + "/" # must contain trailing '/'

-         koji_url = self.compose.conf["koji_url"]

-         koji_proxy = koji.ClientSession(koji_url)

-         self.package_sets = get_pkgset_from_koji(self.compose, koji_proxy, path_prefix)

-         self.path_prefix = path_prefix

- '''

- 

- 

- def get_pkgset_from_koji(compose, koji_proxy, path_prefix):

-     event_info = get_koji_event_info(compose, koji_proxy)

-     tag_info = get_koji_tag_info(compose, koji_proxy)

+ def get_pkgset_from_koji(compose, koji_wrapper, path_prefix):

+     koji_proxy = koji_wrapper.koji_proxy

+     event_info = get_koji_event_info(compose, koji_wrapper)

+     tag_info = get_koji_tag_info(compose, koji_wrapper)

  

-     pkgset_global = populate_global_pkgset(compose, koji_proxy, path_prefix, tag_info, event_info)

+     pkgset_global = populate_global_pkgset(compose, koji_wrapper, path_prefix, tag_info, event_info)

  #    get_extra_packages(compose, pkgset_global)

      package_sets = populate_arch_pkgsets(compose, path_prefix, pkgset_global)

      package_sets["global"] = pkgset_global
@@ -105,7 +81,8 @@ 

      return package_sets

  

  

- def populate_global_pkgset(compose, koji_proxy, path_prefix, compose_tag, event_id):

+ def populate_global_pkgset(compose, koji_wrapper, path_prefix, compose_tag, event_id):

+     koji_proxy = koji_wrapper.koji_proxy

      ALL_ARCHES = set(["src"])

      for arch in compose.get_arches():

          is_multilib = arch in compose.conf["multilib_arches"]
@@ -121,7 +98,7 @@ 

          pkgset = pickle.load(open(global_pkgset_path, "r"))

      else:

          compose.log_info(msg)

-         pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(koji_proxy, compose.conf["sigkeys"], logger=compose._logger, arches=ALL_ARCHES)

+         pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(koji_wrapper, compose.conf["sigkeys"], logger=compose._logger, arches=ALL_ARCHES)

          pkgset.populate(compose_tag, event_id, inherit=inherit)

          f = open(global_pkgset_path, "w")

          data = pickle.dumps(pkgset)
@@ -133,7 +110,8 @@ 

      return pkgset

  

  

- def get_koji_event_info(compose, koji_proxy):

+ def get_koji_event_info(compose, koji_wrapper):

+     koji_proxy = koji_wrapper.koji_proxy

      event_file = os.path.join(compose.paths.work.topdir(arch="global"), "koji-event")

  

      if compose.koji_event:
@@ -153,7 +131,8 @@ 

      return result

  

  

- def get_koji_tag_info(compose, koji_proxy):

+ def get_koji_tag_info(compose, koji_wrapper):

+     koji_proxy = koji_wrapper.koji_proxy

      tag_file = os.path.join(compose.paths.work.topdir(arch="global"), "koji-tag")

      msg = "Getting a koji tag info"

      if compose.DEBUG and os.path.exists(tag_file):

file modified
+1 -22
@@ -41,27 +41,6 @@ 

      msg = "Running repoclosure"

      compose.log_info("[BEGIN] %s" % msg)

  

-     # Arch repos

-     for arch in compose.get_arches():

-         is_multilib = arch in compose.conf["multilib_arches"]

-         arches = get_valid_arches(arch, is_multilib)

-         repo_id = "repoclosure-%s" % arch

-         repo_dir = compose.paths.work.arch_repo(arch=arch)

- 

-         lookaside = {}

-         if compose.conf.get("product_is_layered", False):

-             for i, lookaside_url in enumerate(get_lookaside_repos(compose, arch, None)):

-                 lookaside["lookaside-%s-%s" % (arch, i)] = lookaside_url

- 

-         cmd = repoclosure.get_repoclosure_cmd(repos={repo_id: repo_dir}, lookaside=lookaside, arch=arches)

-         # Use temp working directory directory as workaround for

-         # https://bugzilla.redhat.com/show_bug.cgi?id=795137

-         tmp_dir = tempfile.mkdtemp(prefix="repoclosure_")

-         try:

-             run(cmd, logfile=compose.paths.log.log_file(arch, "repoclosure"), show_cmd=True, can_fail=True, workdir=tmp_dir)

-         finally:

-             rmtree(tmp_dir)

- 

      # Variant repos

      all_repos = {}  # to be used as lookaside for the self-hosting check

      all_arches = set()
@@ -81,7 +60,7 @@ 

              repo_dir = compose.paths.compose.repository(arch=arch, variant=variant)

              repos[repo_id] = repo_dir

  

-             if compose.conf.get("product_is_layered", False):

+             if compose.conf.get("release_is_layered", False):

                  for i, lookaside_url in enumerate(get_lookaside_repos(compose, arch, variant)):

                      lookaside["lookaside-%s.%s-%s" % (variant.uid, arch, i)] = lookaside_url

  

file modified
+13 -13
@@ -262,31 +262,31 @@ 

          return None

  

      if variant and variant.type == "layered-product":

-         product_short = variant.product_short

-         product_version = variant.product_version

-         product_is_layered = True

-         base_product_short = compose.conf["product_short"]

-         base_product_version = get_major_version(compose.conf["product_version"])

+         release_short = variant.release_short

+         release_version = variant.release_version

+         release_is_layered = True

+         base_product_short = compose.conf["release_short"]

+         base_product_version = get_major_version(compose.conf["release_version"])

          variant_uid = variant.parent.uid

      else:

-         product_short = compose.conf["product_short"]

-         product_version = compose.conf["product_version"]

-         product_is_layered = compose.conf["product_is_layered"]

+         release_short = compose.conf["release_short"]

+         release_version = compose.conf["release_version"]

+         release_is_layered = compose.conf["release_is_layered"]

          base_product_short = compose.conf.get("base_product_short", "")

          base_product_version = compose.conf.get("base_product_version", "")

          variant_uid = variant and variant.uid or None

  

      products = [

-         "%(product_short)s-%(product_version)s %(variant_uid)s.%(arch)s",

-         "%(product_short)s-%(product_version)s %(arch)s",

+         "%(release_short)s-%(release_version)s %(variant_uid)s.%(arch)s",

+         "%(release_short)s-%(release_version)s %(arch)s",

      ]

      layered_products = [

-         "%(product_short)s-%(product_version)s %(base_product_short)s-%(base_product_version)s %(variant_uid)s.%(arch)s",

-         "%(product_short)s-%(product_version)s %(base_product_short)s-%(base_product_version)s %(arch)s",

+         "%(release_short)s-%(release_version)s %(base_product_short)s-%(base_product_version)s %(variant_uid)s.%(arch)s",

+         "%(release_short)s-%(release_version)s %(base_product_short)s-%(base_product_version)s %(arch)s",

      ]

  

      volid = None

-     if product_is_layered:

+     if release_is_layered:

          all_products = layered_products + products

      else:

          all_products = products
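
To make the volume-id format strings above concrete, a hedged illustration of the substitution they perform (values follow the Fedora examples in the new documentation; any later post-processing of the volume id is out of scope here):

    pattern = "%(release_short)s-%(release_version)s %(variant_uid)s.%(arch)s"
    print(pattern % {
        "release_short": "Fedora",
        "release_version": "23",
        "variant_uid": "Server",
        "arch": "x86_64",
    })  # -> Fedora-23 Server.x86_64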

file modified
+1 -1
@@ -19,7 +19,7 @@ 

  

  

  class CreaterepoWrapper(object):

-     def __init__(self, createrepo_c=False):

+     def __init__(self, createrepo_c=True):

          if createrepo_c:

              self.createrepo = "createrepo_c"

              self.mergerepo = "mergerepo_c"

@@ -29,7 +29,8 @@ 

          self.profile = profile

          # assumption: profile name equals executable name (it's a symlink -> koji)

          self.executable = self.profile.replace("_", "-")

-         self.koji_module = __import__(self.profile)

+         self.koji_module = koji.get_profile_module(profile)

+         self.koji_proxy = koji.ClientSession(self.koji_module.config.server)

  

      def get_runroot_cmd(self, target, arch, command, quiet=False, use_shell=True, channel=None, packages=None, mounts=None, weight=None, task_id=True):

          cmd = [self.executable, "runroot"]
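
For context on the wrapper change above: a koji profile (loaded via koji.get_profile_module) now supplies the hub URL and top directory that pkgset_koji_url and pkgset_koji_path_prefix used to carry. A hedged sketch, assuming a profile named "koji" is configured on the system (typically in /etc/koji.conf or /etc/koji.conf.d/):

    import koji

    # Load the named profile and read the settings the compose needs.
    koji_module = koji.get_profile_module("koji")
    print(koji_module.config.server)   # hub URL, previously pkgset_koji_url
    print(koji_module.config.topdir)   # path prefix, previously pkgset_koji_path_prefix

    # The wrapper's koji_proxy is a plain ClientSession against that server.
    session = koji.ClientSession(koji_module.config.server)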

file modified
+4 -4
@@ -109,10 +109,10 @@ 

  

          variant = Variant(**variant_dict)

          if variant.type == "layered-product":

-             product_node = variant_node.xpath("product")[0]

-             variant.product_name = str(product_node.attrib["name"])

-             variant.product_version = str(product_node.attrib["version"])

-             variant.product_short = str(product_node.attrib["short"])

+             release_node = variant_node.xpath("release")[0]

+             variant.release_name = str(release_node.attrib["name"])

+             variant.release_version = str(release_node.attrib["version"])

+             variant.release_short = str(release_node.attrib["short"])

  

          contains_optional = False

          for child_node in variant_node.xpath("variants/variant"):

file modified
+3 -3
@@ -1,6 +1,6 @@ 

  <!ELEMENT variants (ref*,variant*)>

  

- <!ELEMENT variant (product?,arches,groups,environments*,variants*)?>

+ <!ELEMENT variant (release?,arches,groups,environments*,variants*)?>

  <!ATTLIST variant

      id ID #REQUIRED

      name CDATA #REQUIRED
@@ -8,8 +8,8 @@ 

      has_optional (true|false) #IMPLIED

  >

  

- <!ELEMENT product (#PCDATA)>

- <!ATTLIST product

+ <!ELEMENT release (#PCDATA)>

+ <!ATTLIST release

      name CDATA #IMPLIED

      short CDATA #IMPLIED

      version CDATA #IMPLIED

file modified
+19 -60
@@ -1,40 +1,23 @@ 

- # PRODUCT (RELEASE) INFO

- product_name = "Dummy Product"

- product_short = "DP"

- product_version = "1.0"

- product_is_layered = False

- product_type = "ga"

+ # RELEASE

+ release_name = "Dummy Product"

+ release_short = "DP"

+ release_version = "1.0"

+ release_is_layered = False

+ release_type = "ga"

  

  

  # GENERAL SETTINGS

- bootable = False

  comps_file = "dummy-comps.xml"

  variants_file = "dummy-variants.xml"

- sigkeys = [None] # None = unsigned

- 

- # limit tree architectures

- # if undefined, all architectures from variants.xml will be included

- atree_arches = ["x86_64"]

  

- # limit tree variants

- # if undefined, all variants from variants.xml will be included

- #tree_variants = ["Server"]

- 

- multilib_arches = ["ppc64", "x86_64", "s390x"]

- multilib_methods = ["devel", "runtime"] # devel (recommended), all, base, file, kernel, none, runtime

  

- 

- # RUNROOT settings

+ # KOJI SETTINGS

  runroot = False

- #runroot_channel = ""

- #runroot_tag = ""

  

  

  # PKGSET

- pkgset_source = "repos" # koji, repos

- 

- # PKGSET - REPOS

- # pkgset_repos format: {arch: [repo1_url, repo2_url, ...]}

+ sigkeys = [None]

+ pkgset_source = "repos"

  pkgset_repos = {

      "i386": [

          "repo",
@@ -47,47 +30,23 @@ 

      ],

  }

  

- # PKGSET - KOJI

- #pkgset_koji_path_prefix = "/mnt/koji"

- #pkgset_koji_url = ""

- #pkgset_koji_tag = ""

+ 

+ # CREATEREPO

+ createrepo_checksum = "sha256"

  

  

  # GATHER

  gather_source = "comps"

  gather_method = "deps"

- check_deps = False

  greedy_method = "build"

+ check_deps = False

  

- # fomat: [(variant_uid_regex, {arch|*: [repos]})]

- # gather_lookaside_repos = []

- 

- # GATHER - JSON

- # format: {variant_uid: {arch: package: [arch1, arch2, None (for any arch)]}}

- #gather_source_mapping = "/path/to/mapping.json"

- 

- 

- # CREATEREPO

- # TODO: checksum type - mandatory

- createrepo_c = True

- 

- 

- # BUILDINSTALL

- 

- 

- # PRODUCTIMG

- 

- 

- # CREATEISO

- create_optional_isos = False

- symlink_isos_to = None

- 

+ multilib_arches = ["ppc64", "x86_64", "s390x"]

+ multilib_methods = ["devel", "runtime"]

  

- # fomat: [(variant_uid_regex, {arch|*: [packages]})]

  additional_packages = [

      ('^Server$', {

          '*': [

- #            'dummy-lvm2-devel',

               'dummy-libtool',

          ],

      }),
@@ -121,14 +80,10 @@ 

      }),

  ]

  

- 

- # format: {arch|*: [packages]}

  multilib_blacklist = {

      "*": [

          "kernel-devel",

          "httpd-devel",

-         "*",

- #        "dummy-glibc",

      ],

  }

  
@@ -137,3 +92,7 @@ 

          "dummy-glibc",

      ],

  }

+ 

+ 

+ # BUILDINSTALL

+ bootable = False

@@ -12,7 +12,7 @@ 

    </variant>

  

    <variant id="Gluster" name="Gluster Layered Product" type="layered-product">

-     <product name="Gluster" version="2.3" short="Gluster" />

+     <release name="Gluster" version="2.3" short="Gluster" />

      <arches>

        <arch>x86_64</arch>

      </arches>