Diff
127 commits, 65 files changed
+5461 -661

4.0.13 release
Dennis Gilmore • 7 years ago  
file modified
+1
@@ -10,3 +10,4 @@

  include tests/*

  include tests/data/*

  include tests/data/*/*

+ include tests/fixtures/*

file added
+36
@@ -0,0 +1,36 @@

+ # Pungi

+ 

+ *Pungi* is a distribution compose tool.

+ 

+ Composes are release snapshots that contain release deliverables such as:

+ 

+ - installation trees

+     - RPMs

+     - repodata

+     - comps

+ - (bootable) ISOs

+ - kickstart trees

+     - anaconda images

+     - images for PXE boot

+ 

+ 

+ ## Tool overview

+ 

+ *Pungi* consists of multiple separate executables backed by a common library.

+ 

+ The main entry-point is the `pungi-koji` script. It loads the compose

+ configuration and kicks off the process. Composing itself is done in phases.

+ Each phase is responsible for generating some artifacts on disk and updating

+ the `compose` object that is threaded through all the phases.

+ 

+ *Pungi* itself does not actually do that much. Most of the actual work is

+ delegated to separate executables. *Pungi* just makes sure that all the

+ commands are invoked in the appropriate order and with correct arguments. It

+ also moves the artifacts to correct locations.

+ 

+ 

+ ## Links

+ 

+ - Upstream GIT: https://pagure.io/pungi/

+ - Issue tracker: https://pagure.io/pungi/issues

+ - Questions can be asked on *#fedora-releng* IRC channel on FreeNode

file modified
+1 -2
@@ -200,8 +200,7 @@

      today = time.strftime('%Y%m%d', time.localtime())

  

      def get_arguments(config):

-         parser = OptionParser("%prog [--help] [options]", version="%prog 4.0.5")

- 

+         parser = OptionParser("%prog [--help] [options]", version="%prog 4.0.17")

          def set_config(option, opt_str, value, parser, config):

              config.set('pungi', option.dest, value)

  

@@ -0,0 +1,100 @@

+ #!/usr/bin/env python

+ # -*- coding: utf-8 -*-

+ 

+ from __future__ import print_function

+ 

+ import argparse

+ import kobo.conf

+ import os

+ import sys

+ import tempfile

+ import contextlib

+ import shutil

+ 

+ here = sys.path[0]

+ if here != '/usr/bin':

+     # Git checkout

+     sys.path[0] = os.path.dirname(here)

+ 

+ import pungi.compose

+ import pungi.phases

+ 

+ 

+ class ValidationCompose(pungi.compose.Compose):

+     def __init__(self, conf, has_old):

+         self.conf = conf

+         self._logger = None

+         self.just_phases = []

+         self.skip_phases = []

+         self.has_old_composes = has_old

+ 

+     @property

+     def old_composes(self):

+         return '/dummy' if self.has_old_composes else None

+ 

+ 

+ @contextlib.contextmanager

+ def in_temp_dir():

+     tempdir = tempfile.mkdtemp()

+     yield tempdir

+     shutil.rmtree(tempdir)

+ 

+ 

+ def run(config, topdir, has_old):

+     conf = kobo.conf.PyConfigParser()

+     conf.load_from_file(config)

+ 

+     compose = ValidationCompose(conf, has_old)

+ 

+     pkgset_phase = pungi.phases.PkgsetPhase(compose)

+     phases = [

+         pungi.phases.InitPhase(compose),

+         pungi.phases.BuildinstallPhase(compose),

+         pkgset_phase,

+         pungi.phases.GatherPhase(compose, pkgset_phase),

+         pungi.phases.ExtraFilesPhase(compose, pkgset_phase),

+         pungi.phases.CreaterepoPhase(compose),

+         pungi.phases.OstreeInstallerPhase(compose),

+         pungi.phases.OSTreePhase(compose),

+         pungi.phases.ProductimgPhase(compose, pkgset_phase),

+         pungi.phases.CreateisoPhase(compose),

+         pungi.phases.LiveImagesPhase(compose),

+         pungi.phases.LiveMediaPhase(compose),

+         pungi.phases.ImageBuildPhase(compose),

+         pungi.phases.ImageChecksumPhase(compose),

+         pungi.phases.TestPhase(compose),

+     ]

+ 

+     errors = []

+     for phase in phases:

+         if phase.skip():

+             continue

+         try:

+             phase.validate()

+         except ValueError as ex:

+             for i in str(ex).splitlines():

+                 errors.append("%s: %s" % (phase.name.upper(), i))

+ 

+     return errors

+ 

+ 

+ def main(args=None):

+     parser = argparse.ArgumentParser()

+     parser.add_argument('config', metavar='CONFIG',

+                         help='configuration file to validate')

+     parser.add_argument('--old-composes', action='store_true',

+                         help='indicate if pungi-koji will be run with --old-composes option')

+     opts = parser.parse_args(args)

+ 

+     with in_temp_dir() as topdir:

+         errors = run(opts.config, topdir, opts.old_composes)

+ 

+     for msg in errors:

+         print(msg)

+ 

+     return bool(errors)

+ 

+ 

+ if __name__ == '__main__':

+     if main():

+         sys.exit(1)

file added
+15
@@ -0,0 +1,15 @@

+ #!/usr/bin/env python2

+ # -*- coding: utf-8 -*-

+ 

+ import os

+ import sys

+ 

+ here = sys.path[0]

+ if here != '/usr/bin':

+     # Git checkout

+     sys.path[0] = os.path.dirname(here)

+ 

+ import pungi.createiso

+ 

+ if __name__ == '__main__':

+     pungi.createiso.main()

file modified
+14 -7
@@ -149,11 +149,6 @@

          parser.error("please specify a config")

      opts.config = os.path.abspath(opts.config)

  

-     # check if all requirements are met

-     import pungi.checks

-     if not pungi.checks.check():

-         sys.exit(1)

- 

      import kobo.conf

      import kobo.log

      import productmd.composeinfo
@@ -173,6 +168,12 @@

      conf = kobo.conf.PyConfigParser()

      conf.load_from_file(opts.config)

  

+     # check if all requirements are met

+     import pungi.checks

+     if not pungi.checks.check(conf):

+         sys.exit(1)

+     pungi.checks.check_umask(logger)

+ 

      if opts.target_dir:

          compose_dir = Compose.get_compose_dir(opts.target_dir, conf, compose_type=compose_type, compose_label=opts.label)

      else:
@@ -191,7 +192,6 @@

                        logger=logger,

                        notifier=notifier)

      notifier.compose = compose

-     kobo.log.add_file_logger(logger, compose.paths.log.log_file("global", "pungi.log"))

      COMPOSE = compose

      try:

          run_compose(compose)
@@ -226,6 +226,8 @@

      gather_phase = pungi.phases.GatherPhase(compose, pkgset_phase)

      extrafiles_phase = pungi.phases.ExtraFilesPhase(compose, pkgset_phase)

      createrepo_phase = pungi.phases.CreaterepoPhase(compose)

+     ostree_installer_phase = pungi.phases.OstreeInstallerPhase(compose)

+     ostree_phase = pungi.phases.OSTreePhase(compose)

      productimg_phase = pungi.phases.ProductimgPhase(compose, pkgset_phase)

      createiso_phase = pungi.phases.CreateisoPhase(compose)

      liveimages_phase = pungi.phases.LiveImagesPhase(compose)
@@ -239,7 +241,7 @@

                    buildinstall_phase, productimg_phase, gather_phase,

                    extrafiles_phase, createiso_phase, liveimages_phase,

                    livemedia_phase, image_build_phase, image_checksum_phase,

-                   test_phase):

+                   test_phase, ostree_phase, ostree_installer_phase):

          if phase.skip():

              continue

          try:
@@ -320,6 +322,9 @@

      if not buildinstall_phase.skip():

          buildinstall_phase.copy_files()

  

+     ostree_phase.start()

+     ostree_phase.stop()

+ 

      # PRODUCTIMG phase

      productimg_phase.start()

      productimg_phase.stop()
@@ -342,11 +347,13 @@

      liveimages_phase.start()

      image_build_phase.start()

      livemedia_phase.start()

+     ostree_installer_phase.start()

  

      createiso_phase.stop()

      liveimages_phase.stop()

      image_build_phase.stop()

      livemedia_phase.stop()

+     ostree_installer_phase.stop()

  

      image_checksum_phase.start()

      image_checksum_phase.stop()

@@ -0,0 +1,15 @@

+ #!/usr/bin/env python

+ # -*- coding: utf-8 -*-

+ 

+ import os

+ import sys

+ 

+ here = sys.path[0]

+ if here != '/usr/bin':

+     sys.path.insert(0, os.path.dirname(here))

+ 

+ from pungi import ostree

+ 

+ 

+ if __name__ == '__main__':

+     ostree.main()

@@ -0,0 +1,20 @@

+ #!/usr/bin/env python3

+ 

+ # This needs to work with Python 3 as pylorax only provides the find_templates

+ # function in recent builds that are not provided for Python 2.7.

+ #

+ # This script will print a location of lorax templates. If it fails to import

+ # pylorax, or the find_templates function does not exist, the first command

+ # line argument will be printed instead.

+ 

+ import sys

+ 

+ if len(sys.argv) != 2:

+     print('Usage: {} FALLBACK'.format(sys.argv[0]), file=sys.stderr)

+     sys.exit(1)

+ 

+ try:

+     import pylorax

+     print(pylorax.find_templates())

+ except (ImportError, AttributeError):

+     print(sys.argv[1])

file modified
+2 -2
@@ -44,7 +44,7 @@

  

  # General information about the project.

  project = u'Pungi'

- copyright = u'2015, Red Hat, Inc.'

+ copyright = u'2016, Red Hat, Inc.'

  

  # The version info for the project you're documenting, acts as replacement for

  # |version| and |release|, also used in various other places throughout the
@@ -53,7 +53,7 @@

  # The short X.Y version.

  version = '4.0'

  # The full version, including alpha/beta/rc tags.

- release = '4.0'

+ release = '4.0.17'

  

  # The language for content autogenerated by Sphinx. Refer to documentation

  # for a list of supported languages.

file modified
+241 -30
@@ -98,6 +98,10 @@

  **base_product_version**

      (*str*) -- base product **major** version

  

+ **base_product_type** = "ga"

+     (*str*) -- base product type, "ga", "updates" etc., for full list see

+     documentation of *productmd*.

+ 

  

  Example

  -------
@@ -113,6 +117,12 @@

      base_product_short = "Fedora"

      base_product_version = "23"

  

+ **tree_arches**

+     ([*str*]) -- list of architectures which should be included; if undefined, all architectures from variants.xml will be included

+ 

+ **tree_variants**

+     ([*str*]) -- list of variants which should be included; if undefined, all variants from variants.xml will be included

+ 

  

  General Settings

  ================
@@ -136,6 +146,8 @@

       * live

       * image-build

       * live-media

+      * ostree

+      * ostree-installer

  

         .. note::

  
@@ -144,6 +156,16 @@

  

      Please note that ``*`` as a wildcard matches all architectures but ``src``.

  

+     tree_arches = ["x86_64"]

+     tree_variants = ["Server"]

+ 

+ **comps_filter_environments** [optional]

+     (*bool*) -- When set to ``False``, the comps files for variants will not

+     have their environments filtered to match the variant.

+ 

+ **keep_original_comps** [optional]

+     (*list*) -- List of variants for which the original comps file will be

+     copied without any modifications. Overwrites `comps_filter_environments`.

  

  Example

  -------
@@ -194,18 +216,18 @@

  -------

  

  There a couple common format specifiers available for both the options:

-  * compose_id

-  * release_short

-  * version

-  * date

-  * respin

-  * type

-  * type_suffix

-  * label

-  * label_major_version

-  * variant

-  * arch

-  * disc_type

+  * ``compose_id``

+  * ``release_short``

+  * ``version``

+  * ``date``

+  * ``respin``

+  * ``type``

+  * ``type_suffix``

+  * ``label``

+  * ``label_major_version``

+  * ``variant``

+  * ``arch``

+  * ``disc_type``

  

  **image_name_format** [optional]

      (*str*) -- Python's format string to serve as template for image names
@@ -214,23 +236,33 @@

      means ``createiso``, ``live_images`` and ``buildinstall``.

  

      Available extra keys are:

-      * disc_num

-      * suffix

+      * ``disc_num``

+      * ``suffix``

  

  **image_volid_formats** [optional]

      (*list*) -- A list of format strings for generating volume id.

  

      The extra available keys are:

-      * base_product_short

-      * base_product_version

+      * ``base_product_short``

+      * ``base_product_version``

  

  **image_volid_layered_product_formats** [optional]

-     (*list*) -- A listof format strings for generating volume id for layered

+     (*list*) -- A list of format strings for generating volume id for layered

      products. The keys available are the same as for ``image_volid_formats``.

  

  **volume_id_substitutions** [optional]

      (*dict*) -- A mapping of string replacements to shorten the volume id.

  

+ **disc_types** [optional]

+     (*dict*) -- A mapping for customizing ``disc_type`` used in image names.

+ 

+     Available keys are:

+      * ``boot`` -- for ``boot.iso`` images created in *buildinstall* phase

+      * ``live`` -- for images created by *live_images* phase

+      * ``dvd`` -- for images created by *createiso* phase

+ 

+     Default values are the same as the keys.

+ 

  Example

  -------

  ::
@@ -251,6 +283,12 @@

          'TC': 'T',

      }

  

+     disc_types = {

+         'boot': 'netinst',

+         'live': 'Live',

+         'dvd': 'DVD',

+     }

+ 

  

  Signing

  =======
@@ -289,6 +327,32 @@

          signing_key_password_file = '~/password_for_fedora-24_key'

  

  

+ Git URLs

+ ========

+ 

+ In multiple places the config requires URL of a Git repository to download

+ kickstart file from. This URL is passed on to *Koji*. It is possible to

+ specify which commit to use with this syntax: ::

+ 

+     git://git.example.com/git/repo-name.git?#<rev_spec>

+ 

+ The ``<rev_spec>`` pattern can be replaced with actual commit SHA, a tag name,

+ ``HEAD`` to indicate that tip of default branch should be used or

+ ``origin/<branch_name>`` to use tip of arbitrary branch.

+ 

+ If the URL specifies a branch or ``HEAD``, *Pungi* will replace it with the

+ actual commit SHA. This will later show up in *Koji* tasks and help with

+ tracing what particular inputs were used.

+ 

+ .. note::

+ 

+     The ``origin`` must be specified because of the way *Koji* works with the

+     repository. It will clone the repository then switch to requested state

+     with ``git reset --hard REF``. Since no local branches are created, we need

+     to use full specification including the name of the remote.

+ 

+ 

+ 

  Createrepo Settings

  ===================

  
@@ -302,6 +366,10 @@

  **createrepo_c** = True

      (*bool*) -- use createrepo_c (True) or legacy createrepo (False)

  

+ **createrepo_deltas** = False

+     (*bool*) -- generate delta RPMs against an older compose. This needs to be

+     used together with ``--old-composes`` command line argument.

+ 

  

  

  Example
@@ -435,10 +503,12 @@

       * ``yaboot``

  

  **additional_packages**

-     (*list*) -- additional packages to be included in a variant and architecture; format: [(variant_uid_regex, {arch|*: [package_globs]})]

+     (*list*) -- additional packages to be included in a variant and

+     architecture; format: ``[(variant_uid_regex, {arch|*: [package_globs]})]``

  

  **filter_packages**

-     (*list*) -- packages to be excluded from a variant and architecture; format: [(variant_uid_regex, {arch|*: [package_globs]})]

+     (*list*) -- packages to be excluded from a variant and architecture;

+     format: ``[(variant_uid_regex, {arch|*: [package_globs]})]``

  

  **filter_system_release_packages**

      (*bool*) -- for each variant, figure out the best system release package
@@ -446,16 +516,22 @@

      one system release package. In such case, set this option to ``False``.

  

  **multilib_blacklist**

-     (*dict*) -- multilib blacklist; format: {arch|*: [package_globs]}

+     (*dict*) -- multilib blacklist; format: ``{arch|*: [package_globs]}``. The

+     patterns are tested with ``fnmatch``, so shell globbing is used (not

+     regular expression).

  

  **multilib_whitelist**

-     (*dict*) -- multilib blacklist; format: {arch|*: [package_globs]}

+     (*dict*) -- multilib whitelist; format: ``{arch|*: [package_names]}``. The

+     whitelist must contain exact package names; there are no wildcards or

+     pattern matching.

  

  **gather_lookaside_repos** = []

-     (*list*) -- lookaside repositories used for package gathering; format: [(variant_uid_regex, {arch|*: [repo_urls]})]

+     (*list*) -- lookaside repositories used for package gathering; format:

+     ``[(variant_uid_regex, {arch|*: [repo_urls]})]``

  

  **hashed_directories** = False

-     (*bool*) -- put packages into "hashed" directories, for example Packages/k/kernel-4.0.4-301.fc22.x86_64.rpm

+     (*bool*) -- put packages into "hashed" directories, for example

+     ``Packages/k/kernel-4.0.4-301.fc22.x86_64.rpm``

  

  

  Example
@@ -724,6 +800,29 @@

        * ``title`` (*str*)

        * ``install_tree_from`` (*str*) -- variant to take install tree from

  

+ If many of your media use the same value for one of ``ksurl``, ``release``,

+ ``target`` or ``version``, consider using these options to set the value in one

+ place and have all media inherit it.

+ 

+ **live_media_ksurl**

+     (*str*) -- Provides a fallback for media that do not specify ``ksurl`` in

+     the ``live_media`` block.

+ 

+ **live_media_release**

+     (*str*) -- Provides a fallback for media that do not specify ``release`` in

+     the ``live_media`` block. Please note that if you set this, there is no way

+     to unset it for a particular media. This is important if you want the

+     release generated by Koji.

+ 

+ **live_media_target**

+     (*str*) -- Provides a fallback for media that do not specify ``target`` in

+     the ``live_media`` block.

+ 

+ **live_media_version**

+     (*str*) -- Provides a fallback for media that do not specify ``version`` in

+     the ``live_media`` block.

+ 

+ 

  

  Image Build Settings

  ====================
@@ -841,6 +940,109 @@

      }

  

  

+ OSTree Settings

+ ===============

+ 

+ The ``ostree`` phase of *Pungi* can create ostree repositories in a Koji

+ runroot environment.

+ 

+ **ostree**

+     (*list*) -- a variant/arch mapping of configuration. The format should be

+     ``[(variant_uid_regex, {arch|*: config_dict})]``.

+ 

+     The configuration dict for each variant arch pair must have these keys:

+ 

+     * ``treefile`` -- (*str*) Filename of configuration for ``rpm-ostree``.

+     * ``config_url`` -- (*str*) URL for Git repository with the ``treefile``.

+     * ``source_repo_from`` -- (*str*) Name of variant serving as source repository.

+     * ``ostree_repo`` -- (*str*) Where to put the ostree repository

+ 

+     These keys are optional:

+ 

+     * ``config_branch`` -- (*str*) Git branch of the repo to use. Defaults to

+       ``master``.

+ 

+ 

+ Example config

+ --------------

+ ::

+ 

+     ostree = [

+         ("^Atomic$", {

+             "x86_64": {

+                 "treefile": "fedora-atomic-docker-host.json",

+                 "config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",

+                 "source_repo_from": "Everything",

+                 "ostree_repo": "/mnt/koji/compose/atomic/Rawhide/"

+             }

+         })

+     ]

+ 

+ 

+ Ostree Installer Settings

+ =========================

+ 

+ The ``ostree_installer`` phase of *Pungi* can produce an installer image bundling

+ an OSTree repository. This always runs in Koji as a ``runroot`` task.

+ 

+ **ostree_installer**

+     (*list*) -- a variant/arch mapping of configuration. The format should be

+     ``[(variant_uid_regex, {arch|*: config_dict})]``.

+ 

+     The configuration dict for each variant arch pair must have this key:

+ 

+     * ``source_repo_from`` -- (*str*) Name of variant serving as source

+       repository or a URL pointing to the repo.

+ 

+     These keys are optional:

+ 

+     * ``release`` -- (*str*) Release value to set for the installer image. Set

+       to ``None`` to use the date.respin format.

+ 

+     These optional keys are passed to ``lorax`` to customize the build.

+ 

+     * ``installpkgs`` -- (*[str]*)

+     * ``add_template`` -- (*[str]*)

+     * ``add_arch_template`` -- (*[str]*)

+     * ``add_template_var`` -- (*[str]*)

+     * ``add_arch_template_var`` -- (*[str]*)

+     * ``template_repo`` -- (*str*) Git repository with extra templates.

+     * ``template_branch`` -- (*str*) Branch to use from ``template_repo``.

+ 

+     The templates can either be absolute paths, in which case they will be used

+     as configured; or they can be relative paths, in which case

+     ``template_repo`` needs to point to a Git repository from which to take the

+     templates.

+ 

+ 

+ Example config

+ --------------

+ ::

+ 

+     ostree_installer = [

+         ("^Atomic$", {

+             "x86_64": {

+                 "source_repo_from": "Everything",

+                 "release": None,

+                 "installpkgs": ["fedora-productimg-atomic"],

+                 "add_template": ["atomic-installer/lorax-configure-repo.tmpl"],

+                 "add_template_var": [

+                     "ostree_osname=fedora-atomic",

+                     "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",

+                 ],

+                 "add_arch_template": ["atomic-installer/lorax-embed-repo.tmpl"],

+                 "add_arch_template_var": [

+                     "ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",

+                     "ostree_osname=fedora-atomic",

+                     "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",

+                 ],

+                 'template_repo': 'https://git.fedorahosted.org/git/spin-kickstarts.git',

+                 'template_branch': 'f24',

+             }

+         })

+     ]

+ 

+ 

  Media Checksums Settings

  ========================

  
@@ -859,9 +1061,19 @@

      prefix to that name

  

      It is possible to use format strings that will be replace by actual values.

-     The allowed keys are ``%(release_showrt)s``, ``%(release_short)s``,

-     ``%(release_id)s``, ``%(variant)s``, ``%(version)s``, ``%(date)s``,

-     ``%(type_suffix)s`` and ``%(respin)s``

+     The allowed keys are:

+ 

+       * ``arch``

+       * ``compose_id``

+       * ``date``

+       * ``label``

+       * ``label_major_version``

+       * ``release_short``

+       * ``respin``

+       * ``type``

+       * ``type_suffix``

+       * ``version``

+       * ``variant``

  

      For example, for Fedora the prefix should be

      ``%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s``.
@@ -871,13 +1083,12 @@

  ========================

  

  **translate_paths**

-     (*list*) -- list of paths to translate; format: [(path,translated_path)]

+     (*list*) -- list of paths to translate; format: ``[(path, translated_path)]``

  

  .. note::

      This feature becomes useful when you need to transform compose location

-     into e.g. a http repo which is can be passed to koji image-build.

-     Translation needs to be invoked by a function call in pungi.

-     os.path.normpath() is applied on both path and translated_path

+     into e.g. an HTTP repo which can be passed to ``koji image-build``.

+     The ``path`` part is normalized via ``os.path.normpath()``.

      

  

  Example config

file modified
+2 -1
@@ -10,11 +10,12 @@

  

  git clone ssh://git@pagure.io/docs/pungi.git /tmp/pungi-doc

  pushd /tmp/pungi-doc

+ git checkout 4.0

  git rm -fr ./*

  cp -r /tmp/pungi/doc/_build/html/* ./

  git add .

  git commit -s -m "update rendered pungi docs"

- git push origin master

+ git push origin 4.0

  popd

  

  rm -rf  /tmp/pungi/ /tmp/pungi-doc/

file modified
+150 -5
@@ -1,5 +1,5 @@

  Name:           pungi

- Version:        4.0.5

+ Version:        4.0.17

  Release:        1%{?dist}

  Summary:        Distribution compose tool

  
@@ -8,6 +8,12 @@

  URL:            https://fedorahosted.org/pungi

  Source0:        https://fedorahosted.org/pungi/attachment/wiki/%{version}/%{name}-%{version}.tar.bz2

  BuildRequires:  python-nose, python-nose-cov, python-mock

+ BuildRequires:  python-devel, python-setuptools, python2-productmd

+ BuildRequires:  python-lockfile, kobo, kobo-rpmlib, python-kickstart, createrepo_c

+ BuildRequires:  python-lxml, libselinux-python, yum-utils, lorax

+ BuildRequires:  yum => 3.4.3-28, createrepo >= 0.4.11

+ BuildRequires:  gettext, git-core, cvs

+ 

  Requires:       createrepo >= 0.4.11

  Requires:       yum => 3.4.3-28

  Requires:       lorax >= 22.1
@@ -42,10 +48,12 @@

  %{__python} setup.py build

  

  %install

- rm -rf $RPM_BUILD_ROOT

- %{__python} setup.py install -O1 --skip-build --root $RPM_BUILD_ROOT

- %{__install} -d $RPM_BUILD_ROOT/var/cache/pungi

- %{__install} -d $RPM_BUILD_ROOT/%{_mandir}/man8

+ rm -rf %{buildroot}

+ %{__python} setup.py install -O1 --skip-build --root %{buildroot}

+ %{__install} -d %{buildroot}/var/cache/pungi

+ %{__install} -d %{buildroot}/%{_mandir}/man8

+ # this script has to be run by python3 and setup.py is too dumb

+ sed -i 's|/usr/bin/python$|/usr/bin/python3|' %{buildroot}/%{_bindir}/pungi-pylorax-find-templates

  

  %files

  %defattr(-,root,root,-)
@@ -65,6 +73,143 @@

  #cd tests && ./test_compose.sh

  

  %changelog

+ * Tue Jun 14 2016 Dennis Gilmore <dennis@ausil.us> - 4.0.17-1

+ - Setup global log file before logging anything (lsedlar)

+ - [metadata] Correctly save final flag (lsedlar)

+ - Use unittest2 if available (lsedlar)

+ - Stop using str.format (lsedlar)

+ - The message attribute on exception is deprecated (lsedlar)

+ - [ostree] Rename duplicated test (lsedlar)

+ 

+ * Tue May 24 2016 Dennis Gilmore <dennis@ausil.us> - 4.0.16-1

+ - [ostree-installer] Allow using external repos as source (lsedlar)

+ - [image-build] Allow using external install trees (lsedlar)

+ - Add type to base product for layered releases (lsedlar)

+ - [util] Resolve git+https URLs (lsedlar)

+ 

+ * Fri Apr 29 2016 Dennis Gilmore <dennis@ausil.us> - 4.0.15-1

+ - [createiso] Add back running isohybrid on x86 disk images (dennis)

+ - [createiso] Remove chdir() (lsedlar)

+ - Pungi should log when it tries to publish notifications. (rbean)

+ - [createrepo] Use more verbose output (lsedlar)

+ - [ostree-installer] Drop filename setting (lsedlar)

+ - [ostree] Set each repo to point to current compose (lsedlar)

+ - [ostree-installer] Install ostree in runroot (lsedlar)

+ - [pkgset] Print more detailed logs when rpm is not found (lsedlar)

+ - [ostree-installer] Clone repo with templates (lsedlar)

+ 

+ * Fri Apr 08 2016 Dennis Gilmore <dennis@ausil.us> - 4.0.14-1

+ - [ostree-installer] Copy all lorax outputs (lsedlar)

+ - [ostree] Log to stdout as well (lsedlar)

+ - [ostree-installer] Use separate directory for logs (lsedlar)

+ - [ostree-installer] Put lorax output into work dir (lsedlar)

+ - [ostree] Add test check for modified repo baseurl (lsedlar)

+ - [ostree] Move cloning repo back to compose box (lsedlar)

+ - [ostree] Mount ostree directory in koji (lsedlar)

+ 

+ * Wed Apr 06 2016 Dennis Gilmore <dennis@ausil.us> - 4.0.13-1

+ - [ostree] Enable marking ostree phase as failable (lsedlar)

+ - [koji-wrapper] Initialize wrappers sequentially (lsedlar)

+ - [createiso] Simplify code, test phase (lsedlar)

+ - [createiso] Move runroot work to separate script (lsedlar)

+ - [ostree] Use explicit work directory (lsedlar)

+ - [ostree] Rename atomic to ostree (lsedlar)

+ - [ostree] Move cloning config repo to chroot (lsedlar)

+ - [ostree] Fix call to kobo.shortcuts.run (lsedlar)

+ - [atomic] Stop creating the os directory (lsedlar)

+ - [checksum] Add arch to file name (lsedlar)

+ - install scripts (dennis)

+ 

+ * Fri Apr 01 2016 Dennis Gilmore <dennis@ausil.us> - 4.0.12-1

+ - Add a utility to validate config (lsedlar)

+ - [variants] Stop printing stuff to stderr unconditionally (lsedlar)

+ - Fix atomic/ostree config validations (lsedlar)

+ - [pungi-wrapper] Remove duplicated code (lsedlar)

+ - [checks] Add a check for too restrictive umask (lsedlar)

+ - [util] Remove umask manipulation from makedirs (lsedlar)

+ - Filter variants and architectures (lsedlar)

+ - Refactor checking for failable deliverables (lsedlar)

+ - [buildinstall] Do not crash on failure (lsedlar)

+ 

+ * Mon Mar 28 2016 Dennis Gilmore <dennis@ausil.us> - 4.0.11-1

+ - Reuse helper in all tests (lsedlar)

+ - [atomic] Add atomic_installer phase (lsedlar)

+ - [ostree] Add ostree phase (lsedlar)

+ - [atomic] Add a script to create ostree repo (lsedlar)

+ - Add compose type to release for images (lsedlar)

+ - [image-build] Add traceback on failure (lsedlar)

+ - [image-build] Use subvariants in logging output (lsedlar)

+ - [live-media] Use subvariants in logging (lsedlar)

+ - Add tracebacks to all failable phases (lsedlar)

+ - ppc no longer needs magic bits in the iso (pbrobinson)

+ - [buildinstall] Add more debugging output (lsedlar)

+ - [metadata] Stop crashing on empty path from .treeinfo (lsedlar)

+ - [checksums] Add label to file name (lsedlar)

+ - image_build: fix subvariant handling (awilliam)

+ 

+ * Fri Mar 11 2016 Dennis Gilmore <dennis@ausil.us> - 4.0.10-1

+ - Remove check for disc type (lsedlar)

+ - Update tests to match the subvariant (lsedlar)

+ - add 'subvariant' image property, create live/appliance names (awilliam)

+ - Simplify koji pkgset (lsedlar)

+ 

+ * Thu Mar 10 2016 Dennis Gilmore <dennis@ausil.us> - 4.0.9-1

+ - [init] Update documentation (lsedlar)

+ - [init] Iterate over arches just once (lsedlar)

+ - [init] Remove duplicated checks for comps (lsedlar)

+ - [init] Break long lines (lsedlar)

+ - [init] Don't overwrite the same log file (lsedlar)

+ - [init] Add config option for keeping original comps (lsedlar)

+ - Add tests for the init phase (lsedlar)

+ - [checks] Test printing in all cases (lsedlar)

+ - [checks] Reduce code duplication (lsedlar)

+ - [checks] Relax check for genisoimage (lsedlar)

+ - [checks] Remove duplicate msgfmt line (lsedlar)

+ - [checks] Relax check for isohybrid command (lsedlar)

+ - [checks] Add tests for dependency checking (lsedlar)

+ - [checks] Don't always require jigdo (lsedlar)

+ - [pkgset] Respect inherit setting (lsedlar)

+ - specify that the 4.0 docs are for 4.0.8 (dennis)

+ - [live-media] Support release set to None globally (lsedlar)

+ - include tests/fixtures/* in the tarball (dennis)

+ 

+ * Tue Mar 08 2016 Dennis Gilmore <dennis@ausil.us> - 4.0.8-1

+ - Add README (lsedlar)

+ - [doc] Fix formatting (lsedlar)

+ - [createiso] Add customizing disc type (lsedlar)

+ - [live-images] Add customizing disc type (lsedlar)

+ - [buildinstall] Add customizing disc type (lsedlar)

+ - [buildinstall] Rename method to not mention symlinks (lsedlar)

+ - [gather] Fix documentation of multilib white- and blacklist (lsedlar)

+ - [paths] Document and test translate_path (lsedlar)

+ - [createrepo] Compute delta RPMS against old compose (lsedlar)

+ - [util] Add function to search for old composes (lsedlar)

+ - [live-media] Add global settings (lsedlar)

+ - [live-media] Rename test case (lsedlar)

+ 

+ * Thu Mar 03 2016 Dennis Gilmore <dennis@ausil.us> - 4.0.7-1

+ - Limit the variants with config option 'tree_variants' (dennis)

+ - [createrepo-wrapper] Fix --deltas argument (lsedlar)

+ - [createrepo-wrapper] Add tests (lsedlar)

+ - [koji-wrapper] Retry watching on connection errors (lsedlar)

+ - [createrepo-wrapper] Refactor code (lsedlar)

+ - [paths] Use variant.uid explicitly (lsedlar)

+ - [createrepo] Add tests (lsedlar)

+ - [createrepo] Refactor code (lsedlar)

+ - [image-build] Fix resolving git urls (lsedlar)

+ - [testphase] Don't run repoclosure for empty variants (lsedlar)

+ - [live-images] No manifest for appliances (lsedlar)

+ 

+ * Fri Feb 26 2016 Dennis Gilmore <dennis@ausil.us> - 4.0.6-1

+ - push the 4.0 docs toa  4.0 branch (dennis)

+ - [live-images] Rename log file (lsedlar)

+ - [buildinstall] Use -dvd- in volume ids instead of -boot- (lsedlar)

+ - [buildinstall] Hardlink boot isos (lsedlar)

+ - [doc] Write documentation for kickstart Git URLs (lsedlar)

+ - [util] Resolve branches in git urls (lsedlar)

+ - [live-images] Fix crash when repo_from is not a list (lsedlar)

+ - [buildinstall] Don't copy files for empty variants (lsedlar)

+ 

  * Tue Feb 23 2016 Dennis Gilmore <dennis@ausil.us> - 4.0.5-1

  - [tests] Fix wrong checks in buildinstall tests (lsedlar)

  - [tests] Use temporary files for buildinstall (lsedlar)

file modified
+66 -16
@@ -16,21 +16,58 @@

  

  

  import os.path

- 

- 

+ import platform

+ 

+ 

+ def _will_productimg_run(conf):

+     return conf.get('productimg', False) and conf.get('bootable', False)

+ 

+ 

+ def is_jigdo_needed(conf):

+     return conf.get('create_jigdo', True)

+ 

+ 

+ def is_isohybrid_needed(conf):

+     """The isohybrid command is needed locally only for productimg phase and

+     createiso phase without runroot. If that is not going to run, we don't need

+     to check for it. Additionally, the syslinux package is only available on

+     x86_64 and i386.

+     """

+     runroot = conf.get('runroot', False)

+     if runroot and not _will_productimg_run(conf):

+         return False

+     if platform.machine() not in ('x86_64', 'i686', 'i386'):

+         msg = ('Not checking for /usr/bin/isohybrid due to current architecture. '

+                'Expect failures in productimg phase.')

+         print msg

+         return False

+     return True

+ 

+ 

+ def is_genisoimage_needed(conf):

+     """This is only needed locally for productimg and createiso without runroot.

+     """

+     runroot = conf.get('runroot', False)

+     if runroot and not _will_productimg_run(conf):

+         return False

+     return True

+ 

+ # The first element in the tuple is package name expected to have the

+ # executable (2nd element of the tuple). The last element is an optional

+ # function that should determine if the tool is required based on

+ # configuration.

  tools = [

-     ("isomd5sum", "/usr/bin/implantisomd5"),

-     ("isomd5sum", "/usr/bin/checkisomd5"),

-     ("jigdo", "/usr/bin/jigdo-lite"),

-     ("genisoimage", "/usr/bin/genisoimage"),

-     ("gettext", "/usr/bin/msgfmt"),

-     ("syslinux", "/usr/bin/isohybrid"),

-     ("yum-utils", "/usr/bin/createrepo"),

-     ("yum-utils", "/usr/bin/mergerepo"),

-     ("yum-utils", "/usr/bin/repoquery"),

-     ("git", "/usr/bin/git"),

-     ("cvs", "/usr/bin/cvs"),

-     ("gettext", "/usr/bin/msgfmt"),

+     ("isomd5sum", "/usr/bin/implantisomd5", None),

+     ("isomd5sum", "/usr/bin/checkisomd5", None),

+     ("jigdo", "/usr/bin/jigdo-lite", is_jigdo_needed),

+     ("genisoimage", "/usr/bin/genisoimage", is_genisoimage_needed),

+     ("gettext", "/usr/bin/msgfmt", None),

+     ("syslinux", "/usr/bin/isohybrid", is_isohybrid_needed),

+     ("yum-utils", "/usr/bin/createrepo", None),

+     ("yum-utils", "/usr/bin/mergerepo", None),

+     ("yum-utils", "/usr/bin/repoquery", None),

+     ("git", "/usr/bin/git", None),

+     ("cvs", "/usr/bin/cvs", None),

  ]

  

  imports = [
@@ -42,7 +79,7 @@

  ]

  

  

- def check():

+ def check(conf):

      fail = False

  

      # Check python modules
@@ -54,7 +91,10 @@

              fail = True

  

      # Check tools

-     for package, path in tools:

+     for package, path, test_if_required in tools:

+         if test_if_required and not test_if_required(conf):

+             # The config says this file is not required, so we won't even check it.

+             continue

          if not os.path.exists(path):

              print("Program '%s' doesn't exist. Install package '%s'." % (path, package))

              fail = True
@@ -62,6 +102,16 @@

      return not fail

  

  

+ def check_umask(logger):

+     """Make sure umask is set to something reasonable. If not, log a warning."""

+     mask = os.umask(0)

+     os.umask(mask)

+ 

+     if mask > 0o022:

+         logger.warning('Unusually strict umask detected (0%03o), '

+                        'expect files with broken permissions.', mask)

+ 

+ 

  def validate_options(conf, valid_options):

      errors = []

      for i in valid_options:

file modified
+26 -39
@@ -33,7 +33,7 @@

  from pungi.wrappers.variants import VariantsXmlParser

  from pungi.paths import Paths

  from pungi.wrappers.scm import get_file_from_scm

- from pungi.util import makedirs, get_arch_variant_data

+ from pungi.util import makedirs, get_arch_variant_data, get_format_substs

  from pungi.metadata import compose_to_composeinfo

  

  
@@ -51,6 +51,7 @@

          ci.base_product.name = conf["base_product_name"]

          ci.base_product.short = conf["base_product_short"]

          ci.base_product.version = conf["base_product_version"]

+         ci.base_product.type = conf.get("base_product_type", "ga").lower()

  

      ci.compose.label = compose_label

      ci.compose.type = compose_type
@@ -110,6 +111,10 @@

          # path definitions

          self.paths = Paths(self)

  

+         # Set up logging to file

+         if logger:

+             kobo.log.add_file_logger(logger, self.paths.log.log_file("global", "pungi.log"))

+ 

          # to provide compose_id, compose_date and compose_respin

          self.ci_base = ComposeInfo()

          self.ci_base.load(os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json"))
@@ -202,9 +207,11 @@

              shutil.copy2(os.path.join(tmp_dir, file_name), variants_file)

              shutil.rmtree(tmp_dir)

  

-         file_obj = open(variants_file, "r")

          tree_arches = self.conf.get("tree_arches", None)

-         self.variants = VariantsXmlParser(file_obj, tree_arches).parse()

+         tree_variants = self.conf.get("tree_variants", None)

+         with open(variants_file, "r") as file_obj:

+             parser = VariantsXmlParser(file_obj, tree_arches, tree_variants, logger=self._logger)

+             self.variants = parser.parse()

  

          # populate ci_base with variants - needed for layered-products (compose_id)

          ####FIXME - compose_to_composeinfo is no longer needed and has been
@@ -215,24 +222,17 @@

      def get_variants(self, types=None, arch=None, recursive=False):

          result = []

          types = types or ["variant", "optional", "addon", "layered-product"]

-         for i in self.variants.values():

-             if i.type in types:

-                 if arch and arch not in i.arches:

-                     continue

+         for i in self.variants.itervalues():

+             if i.type in types and (not arch or arch in i.arches):

                  result.append(i)

              result.extend(i.get_variants(types=types, arch=arch, recursive=recursive))

          return sorted(set(result))

  

      def get_arches(self):

          result = set()

-         tree_arches = self.conf.get("tree_arches", None)

          for variant in self.get_variants():

              for arch in variant.arches:

-                 if tree_arches:

-                     if arch in tree_arches:

-                         result.add(arch)

-                 else:

-                     result.add(arch)

+                 result.add(arch)

          return sorted(result)

  

      @property
@@ -275,25 +275,6 @@

              return

          return open(self.status_file, "r").read().strip()

  

-     def get_format_substs(self, **kwargs):

-         """Return a dict of basic format substitutions.

- 

-         Any kwargs will be added as well.

-         """

-         substs = {

-             'compose_id': self.compose_id,

-             'release_short': self.ci_base.release.short,

-             'version': self.ci_base.release.version,

-             'date': self.compose_date,

-             'respin': self.compose_respin,

-             'type': self.compose_type,

-             'type_suffix': self.compose_type_suffix,

-             'label': self.compose_label,

-             'label_major_version': self.compose_label_major_version,

-         }

-         substs.update(kwargs)

-         return substs

- 

      def get_image_name(self, arch, variant, disc_type='dvd',

                         disc_num=1, suffix='.iso', format=None):

          """Create a filename for image with given parameters.
@@ -306,8 +287,6 @@

          if arch == "src":

              arch = "source"

  

-         if disc_type not in ("cd", "dvd", "ec2", "live", "boot"):

-             raise RuntimeError("Unsupported disc type: %s" % disc_type)

          if disc_num:

              disc_num = int(disc_num)

          else:
@@ -317,11 +296,12 @@

              variant_uid = variant.parent.uid

          else:

              variant_uid = variant.uid

-         args = self.get_format_substs(variant=variant_uid,

-                                       arch=arch,

-                                       disc_type=disc_type,

-                                       disc_num=disc_num,

-                                       suffix=suffix)

+         args = get_format_substs(self,

+                                  variant=variant_uid,

+                                  arch=arch,

+                                  disc_type=disc_type,

+                                  disc_num=disc_num,

+                                  suffix=suffix)

          try:

              return format % args

          except KeyError as err:
@@ -339,3 +319,10 @@

              self.failed_deliverables.setdefault(variant_uid, {}).setdefault(arch, []).append(deliverable)

              return True

          return False

+ 

+     @property

+     def image_release(self):

+         """Generate a value to pass to Koji as image release. This includes

+         date, compose type and respin."""

+         return '%s%s.%s' % (self.compose_date, self.ci_base.compose.type_suffix,

+                             self.compose_respin)

file added
+113
@@ -0,0 +1,113 @@

+ # -*- coding: utf-8 -*-

+ 

+ import argparse

+ import os

+ from kobo import shortcuts

+ 

+ from .wrappers.iso import IsoWrapper

+ from .wrappers.jigdo import JigdoWrapper

+ 

+ 

+ def find_templates(fallback):

+     """

+     Helper for finding lorax templates. The called program needs to run with

+     Python 3, while the rest of this script only supports Python 2.

+     """

+     _, output = shortcuts.run(['pungi-pylorax-find-templates', fallback],

+                               stdout=True, show_cmd=True)

+     return output.strip()

+ 

+ 

+ def make_image(iso, opts):

+     mkisofs_kwargs = {}

+ 

+     if opts.buildinstall_method:

+         if opts.buildinstall_method == 'lorax':

+             dir = find_templates('/usr/share/lorax')

+             mkisofs_kwargs["boot_args"] = iso.get_boot_options(

+                 opts.arch, os.path.join(dir, 'config_files/ppc'))

+         elif opts.buildinstall_method == 'buildinstall':

+             mkisofs_kwargs["boot_args"] = iso.get_boot_options(

+                 opts.arch, "/usr/lib/anaconda-runtime/boot")

+ 

+     # ppc(64) doesn't seem to support utf-8

+     if opts.arch in ("ppc", "ppc64", "ppc64le"):

+         mkisofs_kwargs["input_charset"] = None

+ 

+     cmd = iso.get_mkisofs_cmd(opts.iso_name, None, volid=opts.volid,

+                               exclude=["./lost+found"],

+                               graft_points=opts.graft_points, **mkisofs_kwargs)

+     shortcuts.run(cmd, stdout=True, show_cmd=True, workdir=opts.output_dir)

+ 

+ 

+ def implant_md5(iso, opts):

+     cmd = iso.get_implantisomd5_cmd(opts.iso_name, opts.supported)

+     shortcuts.run(cmd, stdout=True, show_cmd=True, workdir=opts.output_dir)

+ 

+ 

+ def run_isohybrid(iso, opts):

+     """If the image is bootable, it needs to include an MBR or GPT so that it

+     can actually be booted. This is done by running isohybrid on the image.

+     """

+     if opts.buildinstall_method and opts.arch in ["x86_64", "i386"]:

+         cmd = iso.get_isohybrid_cmd(opts.iso_name, opts.arch)

+         shortcuts.run(cmd, stdout=True, show_cmd=True, workdir=opts.output_dir)

+ 

+ 

+ def make_manifest(iso, opts):

+     shortcuts.run(iso.get_manifest_cmd(opts.iso_name), stdout=True,

+                   show_cmd=True, workdir=opts.output_dir)

+ 

+ 

+ def make_jigdo(opts):

+     jigdo = JigdoWrapper()

+     files = [

+         {

+             "path": opts.os_tree,

+             "label": None,

+             "uri": None,

+         }

+     ]

+     cmd = jigdo.get_jigdo_cmd(os.path.join(opts.output_dir, opts.iso_name),

+                               files, output_dir=opts.jigdo_dir,

+                               no_servers=True, report="noprogress")

+     shortcuts.run(cmd, stdout=True, show_cmd=True, workdir=opts.output_dir)

+ 

+ 

+ def run(opts):

+     iso = IsoWrapper()

+     make_image(iso, opts)

+     run_isohybrid(iso, opts)

+     implant_md5(iso, opts)

+     make_manifest(iso, opts)

+     if opts.jigdo_dir:

+         make_jigdo(opts)

+ 

+ 

+ def main(args=None):

+     parser = argparse.ArgumentParser()

+     parser.add_argument('--output-dir', required=True,

+                         help='where to put the final image')

+     parser.add_argument('--iso-name', required=True,

+                         help='filename for the created ISO image')

+     parser.add_argument('--volid', required=True,

+                         help='volume id for the image')

+     parser.add_argument('--graft-points', required=True,

+                         help='')

+     parser.add_argument('--buildinstall-method',

+                         choices=['lorax', 'buildinstall'],

+                         help='how was the boot.iso created for bootable products')

+     parser.add_argument('--arch', required=True,

+                         help='what arch are we building the ISO for')

+     parser.add_argument('--supported', action='store_true',

+                         help='supported flag for implantisomd5')

+     parser.add_argument('--jigdo-dir',

+                         help='where to put jigdo files')

+     parser.add_argument('--os-tree',

+                         help='path to the OS tree (used to generate jigdo files)')

+ 

+     opts = parser.parse_args(args)

+ 

+     if bool(opts.jigdo_dir) != bool(opts.os_tree):

+         parser.error('--jigdo-dir must be used together with --os-tree')

+     run(opts)

file modified
+6
@@ -71,6 +71,7 @@

      ci.compose.date = compose.compose_date

      ci.compose.respin = compose.compose_respin

      ci.compose.label = compose.compose_label

+     ci.compose.final = compose.supported

  

      # product

      ci.release.name = compose.conf["release_name"]
@@ -84,6 +85,7 @@

          ci.base_product.name = compose.conf["base_product_name"]

          ci.base_product.version = compose.conf["base_product_version"]

          ci.base_product.short = compose.conf["base_product_short"]

+         ci.base_product.type = compose.conf.get("base_product_type", "ga").lower()

  

      def dump_variant(variant, parent=None):

          var = productmd.composeinfo.Variant(ci)
@@ -293,6 +295,10 @@

                  ti.images.images[platform] = {}

                  ti.tree.platforms.add(platform)

                  for image, path in bi_ti.images.images[platform].items():

+                     if not path:

+                         # The .treeinfo file contains an image without a path.

+                         # We can't add that.

+                         continue

                      ti.images.images[platform][image] = path

                      ti.checksums.add(path, "sha256", root_dir=os_tree)

  

file modified
+2
@@ -57,6 +57,8 @@

          self._update_args(kwargs)

  

          with self.lock:

+             self.compose.log_debug("Notification: %r %r, %r" % (

+                 self.cmd, msg, kwargs))

              ret, _ = shortcuts.run((self.cmd, msg),

                                     stdin_data=json.dumps(kwargs),

                                     can_fail=True,

file added
+63
@@ -0,0 +1,63 @@

+ # -*- coding: utf-8 -*-

+ 

+ """

+ This module contains functions required by pungi-make-ostree.

+ It is expected to be runnable in Koji runroot.

+ """

+ 

+ import argparse

+ import os

+ from kobo import shortcuts

+ import errno

+ 

+ 

+ def ensure_dir(path):

+     try:

+         os.makedirs(path)

+     except OSError as err:

+         if err.errno != errno.EEXIST:

+             raise

+     return path

+ 

+ 

+ def make_log_file(log_dir, filename):

+     """Return path to log file with given name, if log_dir is set."""

+     if not log_dir:

+         return None

+     ensure_dir(log_dir)

+     return os.path.join(log_dir, '%s.log' % filename)

+ 

+ 

+ def init_ostree_repo(repo, log_dir=None):

+     """If the ostree repo does not exist, initialize it."""

+     log_file = make_log_file(log_dir, 'init-ostree-repo')

+     if not os.path.isdir(repo):

+         ensure_dir(repo)

+         shortcuts.run(['ostree', 'init', '--repo=%s' % repo, '--mode=archive-z2'],

+                       show_cmd=True, stdout=True, logfile=log_file)

+ 

+ 

+ def make_ostree_repo(repo, config, log_dir=None):

+     log_file = make_log_file(log_dir, 'create-ostree-repo')

+     shortcuts.run(['rpm-ostree', 'compose', 'tree', '--repo=%s' % repo, config],

+                   show_cmd=True, stdout=True, logfile=log_file)

+ 

+ 

+ def run(opts):

+     init_ostree_repo(opts.ostree_repo, log_dir=opts.log_dir)

+     make_ostree_repo(opts.ostree_repo, opts.treefile, log_dir=opts.log_dir)

+ 

+ 

+ def main(args=None):

+     parser = argparse.ArgumentParser()

+     parser.add_argument('--log-dir',

+                         help='where to log output')

+ 

+     parser.add_argument('ostree_repo', metavar='OSTREE_REPO',

+                         help='where to put the ostree repo')

+     parser.add_argument('--treefile', required=True,

+                         help='treefile for rpm-ostree')

+ 

+     opts = parser.parse_args(args)

+ 

+     run(opts)

file modified
+7 -3
@@ -25,6 +25,7 @@

  

  from pungi.util import makedirs

  

+ 

  def translate_path(compose, path):

      """

      @param compose - required for access to config
@@ -36,11 +37,14 @@

      for prefix, newvalue in mapping:

          prefix = os.path.normpath(prefix)

          if normpath.startswith(prefix):

-             # don't call os.path.normpath on result since that would break http:// -> http:/ and so on

-             return normpath.replace(prefix, newvalue, 1) # replace only 1 occurance

+             # We can't call os.path.normpath on result since it is not actually

+             # a path - http:// would get changed to http:/ and so on.

+             # Only the first occurrence should be replaced.

+             return normpath.replace(prefix, newvalue, 1)

  

      return normpath

  

+ 

  class Paths(object):

      def __init__(self, compose):

          paths_module_name = compose.conf.get("paths_module", None)
@@ -247,7 +251,7 @@

          Examples:

              work/x86_64/repo_package_list/Server.x86_64.rpm.conf

          """

-         file_name = "%s.%s" % (variant, arch)

+         file_name = "%s.%s" % (variant.uid, arch)

          if pkg_type is not None:

              file_name += ".%s" % pkg_type

          file_name += ".conf"

@@ -29,3 +29,5 @@

  from test import TestPhase  # noqa

  from image_checksum import ImageChecksumPhase    # noqa

  from livemedia_phase import LiveMediaPhase  # noqa

+ from ostree import OSTreePhase  # noqa

+ from ostree_installer import OstreeInstallerPhase  # noqa

file modified
+12
@@ -72,3 +72,15 @@

  

      def run(self):

          raise NotImplementedError

+ 

+ 

+ class ConfigGuardedPhase(PhaseBase):

+     """A phase that is skipped unless config option is set."""

+ 

+     def skip(self):

+         if super(ConfigGuardedPhase, self).skip():

+             return True

+         if not self.compose.conf.get(self.name):

+             self.compose.log_info("Config section '%s' was not found. Skipping." % self.name)

+             return True

+         return False

file modified
+33 -23
@@ -24,11 +24,12 @@

  import re

  

  from kobo.threads import ThreadPool, WorkerThread

- from kobo.shortcuts import run, relative_path

+ from kobo.shortcuts import run

  from productmd.images import Image

  

  from pungi.arch import get_valid_arches

  from pungi.util import get_buildroot_rpms, get_volid, get_arch_variant_data

+ from pungi.util import get_file_size, get_mtime, failable

  from pungi.wrappers.lorax import LoraxWrapper

  from pungi.wrappers.kojiwrapper import KojiWrapper

  from pungi.wrappers.iso import IsoWrapper
@@ -72,6 +73,11 @@

              "expected_types": [str],

              "optional": True,

          },

+         {

+             "name": "buildinstall_symlink",

+             "expected_types": [bool],

+             "optional": True,

+         },

      )

  

      def __init__(self, compose):
@@ -119,25 +125,26 @@

          version = self.compose.conf["release_version"]

          release = self.compose.conf["release_version"]

          buildinstall_method = self.compose.conf["buildinstall_method"]

+         disc_type = self.compose.conf.get('disc_types', {}).get('dvd', 'dvd')

  

          for arch in self.compose.get_arches():

              commands = []

  

              repo_baseurl = self.compose.paths.work.arch_repo(arch)

              output_dir = self.compose.paths.work.buildinstall_dir(arch)

-             volid = get_volid(self.compose, arch, disc_type="boot")

              buildarch = get_valid_arches(arch)[0]

  

              if buildinstall_method == "lorax":

                  for variant in self.compose.get_variants(arch=arch, types=['variant']):

                      if variant.is_empty:

                          continue

-                     volid = get_volid(self.compose, arch, variant=variant, disc_type="boot")

+                     volid = get_volid(self.compose, arch, variant=variant, disc_type=disc_type)

                      commands.append(

                          (variant,

                           self._get_lorax_cmd(repo_baseurl, output_dir, variant, arch, buildarch, volid))

                      )

              elif buildinstall_method == "buildinstall":

+                 volid = get_volid(self.compose, arch, disc_type=disc_type)

                  commands.append(

                      (None,

                       lorax.get_buildinstall_cmd(product,
@@ -160,11 +167,15 @@

  

      def copy_files(self):

          buildinstall_method = self.compose.conf["buildinstall_method"]

+         disc_type = self.compose.conf.get('disc_types', {}).get('dvd', 'dvd')

  

          # copy buildinstall files to the 'os' dir

          kickstart_file = get_kickstart_file(self.compose)

          for arch in self.compose.get_arches():

              for variant in self.compose.get_variants(arch=arch, types=["self", "variant"]):

+                 if variant.is_empty:

+                     continue

+ 

                  buildinstall_dir = self.compose.paths.work.buildinstall_dir(arch)

  

                  # Lorax runs per-variant, so we need to tweak the source path
@@ -178,9 +189,11 @@

                  os_tree = self.compose.paths.compose.os_tree(arch, variant)

                  # TODO: label is not used

                  label = ""

-                 volid = get_volid(self.compose, arch, variant, escape_spaces=False, disc_type="boot")

-                 tweak_buildinstall(buildinstall_dir, os_tree, arch, variant.uid, label, volid, kickstart_file)

-                 symlink_boot_iso(self.compose, arch, variant)

+                 volid = get_volid(self.compose, arch, variant, escape_spaces=False, disc_type=disc_type)

+                 msg = 'Copying results of buildinstall'

+                 with failable(self.compose, variant, arch, 'buildinstall', msg):

+                     tweak_buildinstall(buildinstall_dir, os_tree, arch, variant.uid, label, volid, kickstart_file)

+                     link_boot_iso(self.compose, arch, variant)

  

  

  def get_kickstart_file(compose):
@@ -303,10 +316,12 @@

      shutil.rmtree(tmp_dir)

  

  

- def symlink_boot_iso(compose, arch, variant):

+ def link_boot_iso(compose, arch, variant):

      if arch == "src":

          return

  

+     disc_type = compose.conf.get('disc_types', {}).get('boot', 'boot')

+ 

      symlink_isos_to = compose.conf.get("symlink_isos_to", None)

      os_tree = compose.paths.compose.os_tree(arch, variant)

      # TODO: find in treeinfo?
@@ -314,8 +329,8 @@

      if not os.path.isfile(boot_iso_path):

          return

  

-     msg = "Symlinking boot.iso (arch: %s, variant: %s)" % (arch, variant)

-     filename = compose.get_image_name(arch, variant, disc_type="boot",

+     msg = "Linking boot.iso (arch: %s, variant: %s)" % (arch, variant)

+     filename = compose.get_image_name(arch, variant, disc_type=disc_type,

                                        disc_num=None, suffix=".iso")

      new_boot_iso_path = compose.paths.compose.iso_path(arch, variant, filename,

                                                         symlink_to=symlink_isos_to)
@@ -329,9 +344,11 @@

          return

  

      compose.log_info("[BEGIN] %s" % msg)

-     # can't make a hardlink - possible cross-device link due to 'symlink_to' argument

-     symlink_target = relative_path(boot_iso_path, new_boot_iso_path)

-     os.symlink(symlink_target, new_boot_iso_path)

+     # Try to hardlink, and copy if that fails

+     try:

+         os.link(boot_iso_path, new_boot_iso_path)

+     except OSError:

+         shutil.copy2(boot_iso_path, new_boot_iso_path)

  

      iso = IsoWrapper()

      implant_md5 = iso.get_implanted_md5(new_boot_iso_path)
@@ -342,16 +359,16 @@

      run(iso.get_manifest_cmd(iso_name), workdir=iso_dir)

  

      img = Image(compose.im)

-     img.implant_md5 = iso.get_implanted_md5(new_boot_iso_path)

      img.path = new_boot_iso_relative_path

-     img.mtime = int(os.stat(new_boot_iso_path).st_mtime)

-     img.size = os.path.getsize(new_boot_iso_path)

+     img.mtime = get_mtime(new_boot_iso_path)

+     img.size = get_file_size(new_boot_iso_path)

      img.arch = arch

      img.type = "boot"

      img.format = "iso"

      img.disc_number = 1

      img.disc_count = 1

      img.bootable = True

+     img.subvariant = variant.name

      img.implant_md5 = implant_md5

      try:

          img.volume_id = iso.get_volume_id(new_boot_iso_path)
@@ -365,15 +382,8 @@

      def process(self, item, num):

          # The variant is None unless lorax is used as buildinstall method.

          compose, arch, variant, cmd = item

-         try:

+         with failable(compose, variant, arch, 'buildinstall'):

              self.worker(compose, arch, variant, cmd, num)

-         except Exception as exc:

-             if not compose.can_fail(variant, arch, 'buildinstall'):

-                 raise

-             else:

-                 self.pool.log_info(

-                     '[FAIL] Buildinstall for variant %s arch %s failed, but going on anyway.\n%s'

-                     % (variant.uid if variant else 'None', arch, exc))

  

      def worker(self, compose, arch, variant, cmd, num):

          runroot = compose.conf.get("runroot", False)

file modified
+99 -132
@@ -29,9 +29,9 @@

  from pungi.wrappers.iso import IsoWrapper

  from pungi.wrappers.createrepo import CreaterepoWrapper

  from pungi.wrappers.kojiwrapper import KojiWrapper

- from pungi.wrappers.jigdo import JigdoWrapper

  from pungi.phases.base import PhaseBase

- from pungi.util import makedirs, get_volid, get_arch_variant_data

+ from pungi.util import (makedirs, get_volid, get_arch_variant_data, failable,

+                         get_file_size, get_mtime)

  from pungi.media_split import MediaSplitter

  from pungi.compose_metadata.discinfo import read_discinfo, write_discinfo

  
@@ -51,9 +51,24 @@

          PhaseBase.__init__(self, compose)

          self.pool = ThreadPool(logger=self.compose._logger)

  

+     def _find_rpms(self, path):

+         """Check if there are some RPMs in the path."""

+         for _, _, files in os.walk(path):

+             for fn in files:

+                 if fn.endswith(".rpm"):

+                     return True

+         return False

+ 

+     def _is_bootable(self, variant, arch):

+         if arch == "src":

+             return False

+         if variant.type != "variant":

+             return False

+         return self.compose.conf.get("bootable", False)

+ 

      def run(self):

-         iso = IsoWrapper(logger=self.compose._logger)

          symlink_isos_to = self.compose.conf.get("symlink_isos_to", None)

+         disc_type = self.compose.conf.get('disc_types', {}).get('dvd', 'dvd')

          deliverables = []

  

          commands = []
@@ -64,24 +79,16 @@

                      self.compose.log_info("Skipping createiso for %s.%s due to config option" % (variant, arch))

                      continue

  

-                 volid = get_volid(self.compose, arch, variant, disc_type='dvd')

+                 volid = get_volid(self.compose, arch, variant, disc_type=disc_type)

                  os_tree = self.compose.paths.compose.os_tree(arch, variant)

  

                  iso_dir = self.compose.paths.compose.iso_dir(arch, variant, symlink_to=symlink_isos_to)

                  if not iso_dir:

                      continue

  

-                 found = False

-                 for root, dirs, files in os.walk(os_tree):

-                     if found:

-                         break

-                     for fn in files:

-                         if fn.endswith(".rpm"):

-                             found = True

-                             break

- 

-                 if not found:

-                     self.compose.log_warning("No RPMs found for %s.%s, skipping ISO" % (variant, arch))

+                 if not self._find_rpms(os_tree):

+                     self.compose.log_warning("No RPMs found for %s.%s, skipping ISO"

+                                              % (variant.uid, arch))

                      continue

  

                  split_iso_data = split_iso(self.compose, arch, variant)
@@ -90,39 +97,23 @@

                  for disc_num, iso_data in enumerate(split_iso_data):

                      disc_num += 1

  

-                     # XXX: hardcoded disc_type

-                     filename = self.compose.get_image_name(arch, variant,

-                                                            disc_type="dvd",

-                                                            disc_num=disc_num)

-                     iso_path = self.compose.paths.compose.iso_path(arch,

-                                                                    variant,

-                                                                    filename,

-                                                                    symlink_to=symlink_isos_to)

-                     relative_iso_path = self.compose.paths.compose.iso_path(arch,

-                                                                             variant,

-                                                                             filename,

-                                                                             create_dir=False,

-                                                                             relative=True)

+                     filename = self.compose.get_image_name(

+                         arch, variant, disc_type=disc_type, disc_num=disc_num)

+                     iso_path = self.compose.paths.compose.iso_path(

+                         arch, variant, filename, symlink_to=symlink_isos_to)

                      if os.path.isfile(iso_path):

                          self.compose.log_warning("Skipping mkisofs, image already exists: %s" % iso_path)

                          continue

-                     iso_name = os.path.basename(iso_path)

                      deliverables.append(iso_path)

  

-                     graft_points = prepare_iso(self.compose, arch, variant, disc_num=disc_num, disc_count=disc_count, split_iso_data=iso_data)

+                     graft_points = prepare_iso(self.compose, arch, variant,

+                                                disc_num=disc_num, disc_count=disc_count,

+                                                split_iso_data=iso_data)

  

-                     bootable = self.compose.conf.get("bootable", False)

-                     if arch == "src":

-                         bootable = False

-                     if variant.type != "variant":

-                         bootable = False

+                     bootable = self._is_bootable(variant, arch)

  

                      cmd = {

-                         "arch": arch,

-                         "variant": variant,

                          "iso_path": iso_path,

-                         "relative_iso_path": relative_iso_path,

-                         "build_arch": arch,

                          "bootable": bootable,

                          "cmd": [],

                          "label": "",  # currently not used
@@ -131,64 +122,37 @@

                      }

  

                      if os.path.islink(iso_dir):

-                         cmd["mount"] = os.path.abspath(os.path.join(os.path.dirname(iso_dir), os.readlink(iso_dir)))

+                         cmd["mount"] = os.path.abspath(os.path.join(os.path.dirname(iso_dir),

+                                                                     os.readlink(iso_dir)))

+ 

+                     cmd['cmd'] = [

+                         'pungi-createiso',

+                         '--output-dir=%s' % iso_dir,

+                         '--iso-name=%s' % filename,

+                         '--volid=%s' % volid,

+                         '--graft-points=%s' % graft_points,

+                         '--arch=%s' % arch,

+                     ]

  

-                     chdir_cmd = "cd %s" % pipes.quote(iso_dir)

-                     cmd["cmd"].append(chdir_cmd)

+                     if bootable:

+                         cmd['cmd'].append(

+                             '--buildinstall-method=%s' % self.compose.conf['buildinstall_method']

+                         )

  

-                     mkisofs_kwargs = {}

+                     if self.compose.supported:

+                         cmd['cmd'].append('--supported')

  

-                     if bootable:

-                         buildinstall_method = self.compose.conf["buildinstall_method"]

-                         if buildinstall_method == "lorax":

-                             # TODO: $arch instead of ppc

-                             mkisofs_kwargs["boot_args"] = iso.get_boot_options(arch, "/usr/share/lorax/config_files/ppc")

-                         elif buildinstall_method == "buildinstall":

-                             mkisofs_kwargs["boot_args"] = iso.get_boot_options(arch, "/usr/lib/anaconda-runtime/boot")

- 

-                     # ppc(64) doesn't seem to support utf-8

-                     if arch in ("ppc", "ppc64", "ppc64le"):

-                         mkisofs_kwargs["input_charset"] = None

- 

-                     mkisofs_cmd = iso.get_mkisofs_cmd(iso_name, None, volid=volid, exclude=["./lost+found"], graft_points=graft_points, **mkisofs_kwargs)

-                     mkisofs_cmd = " ".join([pipes.quote(i) for i in mkisofs_cmd])

-                     cmd["cmd"].append(mkisofs_cmd)

- 

-                     if bootable and arch == "x86_64":

-                         isohybrid_cmd = "isohybrid --uefi %s" % pipes.quote(iso_name)

-                         cmd["cmd"].append(isohybrid_cmd)

-                     elif bootable and arch == "i386":

-                         isohybrid_cmd = "isohybrid %s" % pipes.quote(iso_name)

-                         cmd["cmd"].append(isohybrid_cmd)

- 

-                     # implant MD5SUM to iso

-                     isomd5sum_cmd = iso.get_implantisomd5_cmd(iso_name, self.compose.supported)

-                     isomd5sum_cmd = " ".join([pipes.quote(i) for i in isomd5sum_cmd])

-                     cmd["cmd"].append(isomd5sum_cmd)

- 

-                     # create iso manifest

-                     cmd["cmd"].append(iso.get_manifest_cmd(iso_name))

- 

-                     # create jigdo

-                     create_jigdo = self.compose.conf.get("create_jigdo", True)

-                     if create_jigdo:

-                         jigdo = JigdoWrapper(logger=self.compose._logger)

+                     if self.compose.conf.get('create_jigdo', True):

                          jigdo_dir = self.compose.paths.compose.jigdo_dir(arch, variant)

-                         files = [

-                             {

-                                 "path": os_tree,

-                                 "label": None,

-                                 "uri": None,

-                             }

-                         ]

-                         jigdo_cmd = jigdo.get_jigdo_cmd(iso_path, files, output_dir=jigdo_dir, no_servers=True, report="noprogress")

-                         jigdo_cmd = " ".join([pipes.quote(i) for i in jigdo_cmd])

-                         cmd["cmd"].append(jigdo_cmd)

- 

-                     cmd["cmd"] = " && ".join(cmd["cmd"])

+                         cmd['cmd'].extend([

+                             '--jigdo-dir=%s' % jigdo_dir,

+                             '--os-tree=%s' % os_tree,

+                         ])

+ 

                      commands.append((cmd, variant, arch))

  

-         self.compose.notifier.send('createiso-targets', deliverables=deliverables)

+         if self.compose.notifier:

+             self.compose.notifier.send('createiso-targets', deliverables=deliverables)

  

          for (cmd, variant, arch) in commands:

              self.pool.add(CreateIsoThread(self.pool))
@@ -203,7 +167,7 @@

  

  

  class CreateIsoThread(WorkerThread):

-     def fail(self, compose, cmd):

+     def fail(self, compose, cmd, variant, arch):

          compose.log_error("CreateISO failed, removing ISO: %s" % cmd["iso_path"])

          try:

              # remove incomplete ISO
@@ -211,44 +175,40 @@

              # TODO: remove jigdo & template

          except OSError:

              pass

-         compose.notifier.send('createiso-imagefail',

-                               file=cmd['iso_path'],

-                               arch=cmd['arch'],

-                               variant=str(cmd['variant']))

+         if compose.notifier:

+             compose.notifier.send('createiso-imagefail',

+                                   file=cmd['iso_path'],

+                                   arch=arch,

+                                   variant=str(variant))

  

      def process(self, item, num):

          compose, cmd, variant, arch = item

-         try:

-             self.worker(compose, cmd, num)

-         except Exception as exc:

-             if not compose.can_fail(variant, arch, 'iso'):

-                 raise

-             else:

-                 msg = ('[FAIL] Creating iso for variant %s, arch %s failed, but going on anyway.\n%s'

-                        % (variant.uid, arch, exc))

-                 self.pool.log_info(msg)

+         with failable(compose, variant, arch, 'iso', 'Creating ISO'):

+             self.worker(compose, cmd, variant, arch, num)

  

-     def worker(self, compose, cmd, num):

+     def worker(self, compose, cmd, variant, arch, num):

          mounts = [compose.topdir]

          if "mount" in cmd:

              mounts.append(cmd["mount"])

  

          runroot = compose.conf.get("runroot", False)

-         bootable = compose.conf.get("bootable", False)

-         log_file = compose.paths.log.log_file(cmd["arch"], "createiso-%s" % os.path.basename(cmd["iso_path"]))

+         bootable = cmd['bootable']

+         log_file = compose.paths.log.log_file(

+             arch, "createiso-%s" % os.path.basename(cmd["iso_path"]))

  

-         msg = "Creating ISO (arch: %s, variant: %s): %s" % (cmd["arch"], cmd["variant"], os.path.basename(cmd["iso_path"]))

+         msg = "Creating ISO (arch: %s, variant: %s): %s" % (

+             arch, variant, os.path.basename(cmd["iso_path"]))

          self.pool.log_info("[BEGIN] %s" % msg)

  

          if runroot:

              # run in a koji build root

-             packages = ["coreutils", "genisoimage", "isomd5sum", "jigdo", "strace", "lsof"]

+             packages = ["coreutils", "genisoimage", "isomd5sum", "jigdo", "pungi"]

+             extra_packages = {

+                 'lorax': ['lorax'],

+                 'buildinstall': ['anaconda'],

+             }

              if bootable:

-                 buildinstall_method = compose.conf["buildinstall_method"]

-                 if buildinstall_method == "lorax":

-                     packages += ["lorax"]

-                 elif buildinstall_method == "buildinstall":

-                     packages += ["anaconda"]

+                 packages.extend(extra_packages[compose.conf["buildinstall_method"]])

  

              runroot_channel = compose.conf.get("runroot_channel", None)

              runroot_tag = compose.conf["runroot_tag"]
@@ -259,15 +219,19 @@

              tag_info = koji_proxy.getTag(runroot_tag)

              tag_arches = tag_info["arches"].split(" ")

  

-             if not cmd["bootable"]:

+             build_arch = arch

+             if not bootable:

                  if "x86_64" in tag_arches:

                      # assign non-bootable images to x86_64 if possible

-                     cmd["build_arch"] = "x86_64"

-                 elif cmd["build_arch"] == "src":

+                     build_arch = "x86_64"

+                 elif build_arch == "src":

                      # pick random arch from available runroot tag arches

-                     cmd["build_arch"] = random.choice(tag_arches)

+                     build_arch = random.choice(tag_arches)

  

-             koji_cmd = koji_wrapper.get_runroot_cmd(runroot_tag, cmd["build_arch"], cmd["cmd"], channel=runroot_channel, use_shell=True, task_id=True, packages=packages, mounts=mounts)

+             koji_cmd = koji_wrapper.get_runroot_cmd(

+                 runroot_tag, build_arch, cmd["cmd"],

+                 channel=runroot_channel, use_shell=True, task_id=True,

+                 packages=packages, mounts=mounts)

  

              # avoid race conditions?

              # Kerberos authentication failed: Permission denied in replay cache code (-1765328215)
@@ -275,44 +239,47 @@

  

              output = koji_wrapper.run_runroot_cmd(koji_cmd, log_file=log_file)

              if output["retcode"] != 0:

-                 self.fail(compose, cmd)

-                 raise RuntimeError("Runroot task failed: %s. See %s for more details." % (output["task_id"], log_file))

+                 self.fail(compose, cmd, variant, arch)

+                 raise RuntimeError("Runroot task failed: %s. See %s for more details."

+                                    % (output["task_id"], log_file))

  

          else:

              # run locally

              try:

                  run(cmd["cmd"], show_cmd=True, logfile=log_file)

              except:

-                 self.fail(compose, cmd)

+                 self.fail(compose, cmd, variant, arch)

                  raise

  

          iso = IsoWrapper()

  

          img = Image(compose.im)

-         img.path = cmd["relative_iso_path"]

-         img.mtime = int(os.stat(cmd["iso_path"]).st_mtime)

-         img.size = os.path.getsize(cmd["iso_path"])

-         img.arch = cmd["arch"]

+         img.path = cmd["iso_path"].replace(compose.paths.compose.topdir(), '').lstrip('/')

+         img.mtime = get_mtime(cmd["iso_path"])

+         img.size = get_file_size(cmd["iso_path"])

+         img.arch = arch

          # XXX: HARDCODED

          img.type = "dvd"

          img.format = "iso"

          img.disc_number = cmd["disc_num"]

          img.disc_count = cmd["disc_count"]

          img.bootable = cmd["bootable"]

+         img.subvariant = variant.uid

          img.implant_md5 = iso.get_implanted_md5(cmd["iso_path"])

          try:

              img.volume_id = iso.get_volume_id(cmd["iso_path"])

          except RuntimeError:

              pass

-         compose.im.add(cmd["variant"].uid, cmd["arch"], img)

+         compose.im.add(variant.uid, arch, img)

          # TODO: supported_iso_bit

          # add: boot.iso

  

          self.pool.log_info("[DONE ] %s" % msg)

-         compose.notifier.send('createiso-imagedone',

-                               file=cmd['iso_path'],

-                               arch=cmd['arch'],

-                               variant=str(cmd['variant']))

+         if compose.notifier:

+             compose.notifier.send('createiso-imagedone',

+                                   file=cmd['iso_path'],

+                                   arch=arch,

+                                   variant=str(variant))

  

  

  def split_iso(compose, arch, variant):

file modified
+75 -47
@@ -29,9 +29,10 @@

  from kobo.threads import ThreadPool, WorkerThread

  from kobo.shortcuts import run, relative_path

  

- from pungi.wrappers.scm import get_dir_from_scm

- from pungi.wrappers.createrepo import CreaterepoWrapper

- from pungi.phases.base import PhaseBase

+ from ..wrappers.scm import get_dir_from_scm

+ from ..wrappers.createrepo import CreaterepoWrapper

+ from .base import PhaseBase

+ from ..util import find_old_compose

  

  import productmd.rpms

  
@@ -55,6 +56,11 @@

              "expected_values": ["sha256", "sha"],

          },

          {

+             "name": "createrepo_deltas",

+             "expected_types": [bool],

+             "optional": True,

+         },

+         {

              "name": "product_id",

              "expected_types": [dict],

              "optional": True,
@@ -70,61 +76,71 @@

          PhaseBase.__init__(self, compose)

          self.pool = ThreadPool(logger=self.compose._logger)

  

+     def validate(self):

+         errors = []

+         try:

+             super(CreaterepoPhase, self).validate()

+         except ValueError as exc:

+             errors = exc.message.split('\n')

+ 

+         if not self.compose.old_composes and 'createrepo_deltas' in self.compose.conf:

+             errors.append('Can not generate deltas without old compose')

+ 

+         if errors:

+             raise ValueError('\n'.join(errors))

+ 

      def run(self):

          get_productids_from_scm(self.compose)

          for i in range(3):

              self.pool.add(CreaterepoThread(self.pool))

  

-         for arch in self.compose.get_arches():

-             for variant in self.compose.get_variants(arch=arch):

-                 if variant.is_empty:

-                     continue

-                 self.pool.queue_put((self.compose, arch, variant, "rpm"))

-                 self.pool.queue_put((self.compose, arch, variant, "debuginfo"))

- 

          for variant in self.compose.get_variants():

              if variant.is_empty:

                  continue

              self.pool.queue_put((self.compose, None, variant, "srpm"))

+             for arch in variant.arches:

+                 self.pool.queue_put((self.compose, arch, variant, "rpm"))

+                 self.pool.queue_put((self.compose, arch, variant, "debuginfo"))

  

          self.pool.start()

  

  

  def create_variant_repo(compose, arch, variant, pkg_type):

-     if variant.is_empty:

+     types = {

+         'rpm': ('binary',

+                 lambda: compose.paths.compose.repository(arch=arch, variant=variant)),

+         'srpm': ('source',

+                  lambda: compose.paths.compose.repository(arch='src', variant=variant)),

+         'debuginfo': ('debug',

+                       lambda: compose.paths.compose.debug_repository(arch=arch, variant=variant)),

+     }

+ 

+     if variant.is_empty or (arch is None and pkg_type != 'srpm'):

          compose.log_info("[SKIP ] Creating repo (arch: %s, variant: %s): %s" % (arch, variant))

          return

  

      createrepo_c = compose.conf.get("createrepo_c", True)

      createrepo_checksum = compose.conf["createrepo_checksum"]

+     createrepo_deltas = compose.conf.get("createrepo_deltas", False)

      repo = CreaterepoWrapper(createrepo_c=createrepo_c)

-     if pkg_type == "srpm":

-         repo_dir_arch = compose.paths.work.arch_repo(arch="global")

-     else:

-         repo_dir_arch = compose.paths.work.arch_repo(arch=arch)

- 

-     if pkg_type == "rpm":

-         repo_dir = compose.paths.compose.repository(arch=arch, variant=variant)

-     elif pkg_type == "srpm":

-         repo_dir = compose.paths.compose.repository(arch="src", variant=variant)

-     elif pkg_type == "debuginfo":

-         repo_dir = compose.paths.compose.debug_repository(arch=arch, variant=variant)

-     else:

-         raise ValueError("Unknown package type: %s" % pkg_type)

+     repo_dir_arch = compose.paths.work.arch_repo(arch='global' if pkg_type == 'srpm' else arch)

  

-     if not repo_dir:

-         return

+     try:

+         repo_dir = types[pkg_type][1]()

+     except KeyError:

+         raise ValueError("Unknown package type: %s" % pkg_type)

  

      msg = "Creating repo (arch: %s, variant: %s): %s" % (arch, variant, repo_dir)

  

      # HACK: using global lock

-     createrepo_lock.acquire()

-     if repo_dir in createrepo_dirs:

-         compose.log_warning("[SKIP ] Already in progress: %s" % msg)

-         createrepo_lock.release()

-         return

-     createrepo_dirs.add(repo_dir)

-     createrepo_lock.release()

+     # This is important when addons put packages into parent variant directory.

+     # There can't be multiple createrepo processes operating on the same

+     # directory.

+     with createrepo_lock:

+         if repo_dir in createrepo_dirs:

+             compose.log_warning("[SKIP ] Already in progress: %s" % msg)

+             return

+         createrepo_dirs.add(repo_dir)

  

      if compose.DEBUG and os.path.isdir(os.path.join(repo_dir, "repodata")):

          compose.log_warning("[SKIP ] %s" % msg)
@@ -140,32 +156,44 @@

      manifest.load(manifest_file)

  

      for rpms_arch, data in manifest.rpms[variant.uid].iteritems():

-         if arch is None and pkg_type != "srpm":

-             continue

          if arch is not None and arch != rpms_arch:

              continue

-         for srpm_nevra, srpm_data in data.items():

-             for rpm_nevra, rpm_data in srpm_data.items():

-                 if pkg_type == "rpm" and rpm_data["category"] != "binary":

-                     continue

-                 if pkg_type == "srpm" and rpm_data["category"] != "source":

-                     continue

-                 if pkg_type == "debuginfo" and rpm_data["category"] != "debug":

+         for srpm_data in data.itervalues():

+             for rpm_data in srpm_data.itervalues():

+                 if types[pkg_type][0] != rpm_data['category']:

                      continue

                  path = os.path.join(compose.topdir, "compose", rpm_data["path"])

                  rel_path = relative_path(path, repo_dir.rstrip("/") + "/")

                  rpms.add(rel_path)

  

      file_list = compose.paths.work.repo_package_list(arch, variant, pkg_type)

-     f = open(file_list, "w")

-     for rel_path in sorted(rpms):

-         f.write("%s\n" % rel_path)

-     f.close()

+     with open(file_list, 'w') as f:

+         for rel_path in sorted(rpms):

+             f.write("%s\n" % rel_path)

+ 

+     old_packages_dir = None

+     if createrepo_deltas:

+         old_compose_path = find_old_compose(

+             compose.old_composes,

+             compose.ci_base.release.short,

+             compose.ci_base.release.version,

+             compose.ci_base.base_product.short if compose.ci_base.release.is_layered else None,

+             compose.ci_base.base_product.version if compose.ci_base.release.is_layered else None

+         )

+         if not old_compose_path:

+             compose.log_info("No suitable old compose found in: %s" % compose.old_composes)

+         else:

+             rel_dir = relative_path(repo_dir, compose.topdir.rstrip('/') + '/')

+             old_packages_dir = os.path.join(old_compose_path, rel_dir)

  

      comps_path = None

      if compose.has_comps and pkg_type == "rpm":

          comps_path = compose.paths.work.comps(arch=arch, variant=variant)

-     cmd = repo.get_createrepo_cmd(repo_dir, update=True, database=True, skip_stat=True, pkglist=file_list, outputdir=repo_dir, workers=3, groupfile=comps_path, update_md_path=repo_dir_arch, checksum=createrepo_checksum)

+     cmd = repo.get_createrepo_cmd(repo_dir, update=True, database=True, skip_stat=True,

+                                   pkglist=file_list, outputdir=repo_dir, workers=3,

+                                   groupfile=comps_path, update_md_path=repo_dir_arch,

+                                   checksum=createrepo_checksum, deltas=createrepo_deltas,

+                                   oldpackagedirs=old_packages_dir)

      log_file = compose.paths.log.log_file(arch, "createrepo-%s" % variant)

      run(cmd, logfile=log_file, show_cmd=True)

  

file modified
+20 -24
@@ -5,7 +5,7 @@

  import time

  from kobo import shortcuts

  

- from pungi.util import get_variant_data, resolve_git_url, makedirs, get_mtime, get_file_size

+ from pungi.util import get_variant_data, resolve_git_url, makedirs, get_mtime, get_file_size, failable

  from pungi.phases.base import PhaseBase

  from pungi.linker import Linker

  from pungi.paths import translate_path
@@ -37,6 +37,8 @@

          dict.

          """

          install_tree_from = image_conf.pop('install_tree_from', variant.uid)

+         if '://' in install_tree_from:

+             return install_tree_from

          install_tree_source = self.compose.variants.get(install_tree_from)

          if not install_tree_source:

              raise RuntimeError(
@@ -81,7 +83,7 @@

      def _set_release(self, image_conf):

          """If release is set explicitly to None, replace it with date and respin."""

          if 'release' in image_conf and image_conf['release'] is None:

-             image_conf['release'] = '%s.%s' % (self.compose.compose_date, self.compose.compose_respin)

+             image_conf['release'] = self.compose.image_release

  

      def run(self):

          for variant in self.compose.get_variants():
@@ -100,7 +102,7 @@

                      continue

  

                  # Replace possible ambiguous ref name with explicit hash.

-                 if 'ksurl' in image_conf:

+                 if 'ksurl' in image_conf['image-build']:

                      image_conf["image-build"]['ksurl'] = resolve_git_url(image_conf["image-build"]['ksurl'])

  

                  image_conf["image-build"]["variant"] = variant
@@ -142,35 +144,29 @@

  

      def process(self, item, num):

          compose, cmd = item

-         try:

-             self.worker(num, compose, cmd)

-         except Exception as exc:

-             if not compose.can_fail(cmd["image_conf"]["image-build"]['variant'], '*', 'image-build'):

-                 raise

-             else:

-                 msg = ('[FAIL] image-build for variant %s failed, but going on anyway.\n%s'

-                        % (cmd['image_conf']['image-build']['variant'], exc))

-                 self.pool.log_info(msg)

- 

-     def worker(self, num, compose, cmd):

+         variant = cmd["image_conf"]["image-build"]["variant"]

+         subvariant = cmd["image_conf"]["image-build"].get("subvariant", variant.uid)

+         with failable(compose, variant, '*', 'image-build'):

+             self.worker(num, compose, variant, subvariant, cmd)

+ 

+     def worker(self, num, compose, variant, subvariant, cmd):

          arches = cmd["image_conf"]["image-build"]['arches'].split(',')

          dash_arches = '-'.join(arches)

          log_file = compose.paths.log.log_file(

              dash_arches,

-             "imagebuild-%s-%s-%s" % (dash_arches,

-                                      cmd["image_conf"]["image-build"]["variant"],

+             "imagebuild-%s-%s-%s" % (variant.uid, subvariant,

                                       cmd["image_conf"]["image-build"]['format'].replace(",", "-"))

          )

-         msg = "Creating %s image (arches: %s, variant: %s)" % (cmd["image_conf"]["image-build"]["format"].replace(",", "-"),

-                                                                dash_arches,

-                                                                cmd["image_conf"]["image-build"]["variant"])

+         msg = ("Creating %s image (arches: %s, variant: %s, subvariant: %s)"

+                % (cmd["image_conf"]["image-build"]["format"].replace(",", "-"),

+                   dash_arches, variant, subvariant))

          self.pool.log_info("[BEGIN] %s" % msg)

  

          koji_wrapper = KojiWrapper(compose.conf["koji_profile"])

  

          # writes conf file for koji image-build

          self.pool.log_info("Writing image-build config for %s.%s into %s" % (

-             cmd["image_conf"]["image-build"]["variant"], dash_arches, cmd["conf_file"]))

+             variant, dash_arches, cmd["conf_file"]))

          koji_cmd = koji_wrapper.get_image_build_cmd(cmd["image_conf"],

                                                      conf_file_dest=cmd["conf_file"],

                                                      scratch=cmd['scratch'])
@@ -182,7 +178,8 @@

          self.pool.log_debug("build-image outputs: %s" % (output))

          if output["retcode"] != 0:

              self.fail(compose, cmd)

-             raise RuntimeError("ImageBuild task failed: %s. See %s for more details." % (output["task_id"], log_file))

+             raise RuntimeError("ImageBuild task failed: %s. See %s for more details."

+                                % (output["task_id"], log_file))

  

          # copy image to images/

          image_infos = []
@@ -229,8 +226,7 @@

              img.disc_number = 1     # We don't expect multiple disks

              img.disc_count = 1

              img.bootable = False

-             compose.im.add(variant=cmd["image_conf"]["image-build"]["variant"].uid,

-                            arch=image_info['arch'],

-                            image=img)

+             img.subvariant = subvariant

+             compose.im.add(variant=variant.uid, arch=image_info['arch'], image=img)

  

          self.pool.log_info("[DONE ] %s" % msg)

@@ -4,6 +4,7 @@

  from kobo import shortcuts

  

  from .base import PhaseBase

+ from ..util import get_format_substs

  

  

  MULTIPLE_CHECKSUMS_ERROR = (
@@ -66,28 +67,21 @@

              for arch in self.compose.im.images[variant]:

                  for image in self.compose.im.images[variant][arch]:

                      path = os.path.dirname(os.path.join(top_dir, image.path))

-                     images.setdefault((variant, path), set()).add(image)

+                     images.setdefault((variant, arch, path), set()).add(image)

          return images

  

-     def _get_base_filename(self, variant):

+     def _get_base_filename(self, variant, arch):

          base_checksum_name = self.compose.conf.get('media_checksum_base_filename', '')

          if base_checksum_name:

-             base_checksum_name = base_checksum_name % {

-                 'release_short': self.compose.ci_base.release.short,

-                 'release_id': self.compose.ci_base.release_id,

-                 'variant': variant,

-                 'version': self.compose.ci_base.release.version,

-                 'date': self.compose.compose_date,

-                 'type_suffix': self.compose.compose_type_suffix,

-                 'respin': self.compose.compose_respin,

-             }

+             substs = get_format_substs(self.compose, variant=variant, arch=arch)

+             base_checksum_name = base_checksum_name % substs

              base_checksum_name += '-'

          return base_checksum_name

  

      def run(self):

-         for (variant, path), images in self._get_images().iteritems():

+         for (variant, arch, path), images in self._get_images().iteritems():

              checksums = {}

-             base_checksum_name = self._get_base_filename(variant)

+             base_checksum_name = self._get_base_filename(variant, arch)

              for image in images:

                  filename = os.path.basename(image.path)

                  full_path = os.path.join(path, filename)

file modified
+37 -29
@@ -144,6 +144,12 @@

              "optional": True,

          },

  

+         {

+             "name": "keep_original_comps",

+             "expected_types": [list],

+             "optional": True,

+         },

+ 

  

      )

  
@@ -153,19 +159,21 @@

          return False

  

      def run(self):

-         # write global comps and arch comps

-         write_global_comps(self.compose)

-         for arch in self.compose.get_arches():

-             write_arch_comps(self.compose, arch)

- 

-         # create comps repos

-         for arch in self.compose.get_arches():

-             create_comps_repo(self.compose, arch)

- 

-         # write variant comps

-         for variant in self.compose.get_variants():

-             for arch in variant.arches:

-                 write_variant_comps(self.compose, arch, variant)

+         if self.compose.has_comps:

+             # write global comps and arch comps, create comps repos

+             write_global_comps(self.compose)

+             for arch in self.compose.get_arches():

+                 write_arch_comps(self.compose, arch)

+                 create_comps_repo(self.compose, arch)

+ 

+             # write variant comps

+             for variant in self.compose.get_variants():

+                 should_preserve = variant.uid in self.compose.conf.get('keep_original_comps', [])

+                 for arch in variant.arches:

+                     if should_preserve:

+                         copy_variant_comps(self.compose, arch, variant)

+                     else:

+                         write_variant_comps(self.compose, arch, variant)

  

          # download variants.xml / product.xml?

  
@@ -174,9 +182,6 @@

  

  

  def write_global_comps(compose):

-     if not compose.has_comps:

-         return

- 

      comps_file_global = compose.paths.work.comps(arch="global")

      msg = "Writing global comps file: %s" % comps_file_global

  
@@ -200,9 +205,6 @@

  

  

  def write_arch_comps(compose, arch):

-     if not compose.has_comps:

-         return

- 

      comps_file_arch = compose.paths.work.comps(arch=arch)

      msg = "Writing comps file for arch '%s': %s" % (arch, comps_file_arch)

  
@@ -211,13 +213,12 @@

          return

  

      compose.log_debug(msg)

-     run(["comps_filter", "--arch=%s" % arch, "--no-cleanup", "--output=%s" % comps_file_arch, compose.paths.work.comps(arch="global")])

+     run(["comps_filter", "--arch=%s" % arch, "--no-cleanup",

+          "--output=%s" % comps_file_arch,

+          compose.paths.work.comps(arch="global")])

  

  

  def write_variant_comps(compose, arch, variant):

-     if not compose.has_comps:

-         return

- 

      comps_file = compose.paths.work.comps(arch=arch, variant=variant)

      msg = "Writing comps file (arch: %s, variant: %s): %s" % (arch, variant, comps_file)

  
@@ -233,7 +234,9 @@

          return

  

      compose.log_debug(msg)

-     run(["comps_filter", "--arch=%s" % arch, "--keep-empty-group=conflicts", "--keep-empty-group=conflicts-%s" % variant.uid.lower(), "--output=%s" % comps_file, compose.paths.work.comps(arch="global")])

+     run(["comps_filter", "--arch=%s" % arch, "--keep-empty-group=conflicts",

+          "--keep-empty-group=conflicts-%s" % variant.uid.lower(),

+          "--output=%s" % comps_file, compose.paths.work.comps(arch="global")])

  

      comps = CompsWrapper(comps_file)

      comps.filter_groups(variant.groups)
@@ -242,10 +245,13 @@

      comps.write_comps()

  

  

- def create_comps_repo(compose, arch):

-     if not compose.has_comps:

-         return

+ def copy_variant_comps(compose, arch, variant):

+     global_comps = compose.paths.work.comps(arch="global")

+     comps_file = compose.paths.work.comps(arch=arch, variant=variant)

+     shutil.copy(global_comps, comps_file)

  

+ 

+ def create_comps_repo(compose, arch):

      createrepo_c = compose.conf.get("createrepo_c", True)

      createrepo_checksum = compose.conf["createrepo_checksum"]

      repo = CreaterepoWrapper(createrepo_c=createrepo_c)
@@ -256,6 +262,8 @@

          compose.log_warning("[SKIP ] %s" % msg)

      else:

          compose.log_info("[BEGIN] %s" % msg)

-         cmd = repo.get_createrepo_cmd(comps_repo, update=True, database=True, skip_stat=True, outputdir=comps_repo, groupfile=comps_path, checksum=createrepo_checksum)

-         run(cmd, logfile=compose.paths.log.log_file("global", "arch_repo"), show_cmd=True)

+         cmd = repo.get_createrepo_cmd(comps_repo, update=True, database=True, skip_stat=True,

+                                       outputdir=comps_repo, groupfile=comps_path,

+                                       checksum=createrepo_checksum)

+         run(cmd, logfile=compose.paths.log.log_file(arch, "comps_repo"), show_cmd=True)

          compose.log_info("[DONE ] %s" % msg)

file modified
+28 -21
@@ -22,13 +22,13 @@

  import shutil

  

  from kobo.threads import ThreadPool, WorkerThread

- from kobo.shortcuts import run, save_to_file

+ from kobo.shortcuts import run, save_to_file, force_list

  from productmd.images import Image

  

  from pungi.wrappers.kojiwrapper import KojiWrapper

  from pungi.wrappers.iso import IsoWrapper

  from pungi.phases.base import PhaseBase

- from pungi.util import get_arch_variant_data, resolve_git_url, makedirs, get_mtime, get_file_size

+ from pungi.util import get_arch_variant_data, resolve_git_url, makedirs, get_mtime, get_file_size, failable

  from pungi.paths import translate_path

  

  
@@ -106,13 +106,13 @@

  

          # additional repos

          repos.extend(data.get("additional_repos", []))

-         repos.extend(self._get_extra_repos(arch, variant, data.get('repo_from', [])))

+         repos.extend(self._get_extra_repos(arch, variant, force_list(data.get('repo_from', []))))

          return repos

  

      def _get_release(self, image_conf):

          """If release is set explicitly to None, replace it with date and respin."""

          if 'release' in image_conf and image_conf['release'] is None:

-             return '%s.%s' % (self.compose.compose_date, self.compose.compose_respin)

+             return self.compose.image_release

          return image_conf.get('release', None)

  

      def run(self):
@@ -122,6 +122,7 @@

          for variant in self.compose.variants.values():

              for arch in variant.arches + ["src"]:

                  for data in get_arch_variant_data(self.compose.conf, "live_images", arch, variant):

+                     subvariant = data.get('subvariant', variant.uid)

                      type = data.get('type', 'live')

  

                      if type == 'live':
@@ -136,7 +137,7 @@

                          continue

  

                      cmd = {

-                         "name": data.get("name", None),

+                         "name": data.get('name'),

                          "version": data.get("version", None),

                          "release": self._get_release(data),

                          "dest_dir": dest_dir,
@@ -153,6 +154,7 @@

                          "sign": False,

                          "type": type,

                          "label": "",  # currently not used

+                         "subvariant": subvariant,

                      }

  

                      if 'ksurl' in data:
@@ -178,6 +180,8 @@

          if self.compose.conf.get('live_images_no_rename', False):

              return None

  

+         disc_type = self.compose.conf.get('disc_types', {}).get('live', 'live')

+ 

          format = "%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s"

          # Custom name (prefix)

          if name:
@@ -186,8 +190,8 @@

                  custom_iso_name += "-%s" % version

              format = custom_iso_name + "-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s"

  

-         # XXX: hardcoded disc_type and disc_num

-         return self.compose.get_image_name(arch, variant, disc_type="live",

+         # XXX: hardcoded disc_num

+         return self.compose.get_image_name(arch, variant, disc_type=disc_type,

                                             disc_num=None, format=format)

  

      def stop(self, *args, **kwargs):
@@ -201,26 +205,25 @@

  

      def process(self, item, num):

          compose, cmd, variant, arch = item

-         try:

+         with failable(compose, variant, arch, 'live', 'Creating live images'):

              self.worker(compose, cmd, variant, arch, num)

-         except Exception as exc:

-             if not compose.can_fail(variant, arch, 'live'):

-                 raise

-             else:

-                 msg = ('[FAIL] Creating live image for variant %s, arch %s failed, but going on anyway.\n%s'

-                        % (variant.uid, arch, exc))

-                 self.pool.log_info(msg)

  

      def worker(self, compose, cmd, variant, arch, num):

          self.basename = '%(name)s-%(version)s-%(release)s' % cmd

-         log_file = compose.paths.log.log_file(arch, "createiso-%s" % self.basename)

+         log_file = compose.paths.log.log_file(arch, "liveimage-%s" % self.basename)

+ 

+         subvariant = cmd.pop('subvariant')

+ 

+         imgname = "%s-%s-%s-%s" % (compose.ci_base.release.short, subvariant,

+                                    'Live' if cmd['type'] == 'live' else 'Disk',

+                                    arch)

  

          msg = "Creating ISO (arch: %s, variant: %s): %s" % (arch, variant, self.basename)

          self.pool.log_info("[BEGIN] %s" % msg)

  

          koji_wrapper = KojiWrapper(compose.conf["koji_profile"])

-         name, version = compose.compose_id.rsplit("-", 1)

-         name = cmd["name"] or name

+         _, version = compose.compose_id.rsplit("-", 1)

+         name = cmd["name"] or imgname

          version = cmd["version"] or version

          archive = False

          if cmd["specfile"] and not cmd["scratch"]:
@@ -272,12 +275,15 @@

              for rpm_path in rpm_paths:

                  shutil.copy2(rpm_path, cmd["dest_dir"])

  

-         self._write_manifest(destination)

-         self._add_to_images(compose, variant, arch, cmd['type'], self._get_format(image_path), destination)

+         if cmd['type'] == 'live':

+             # ISO manifest only makes sense for live images

+             self._write_manifest(destination)

+ 

+         self._add_to_images(compose, variant, subvariant, arch, cmd['type'], self._get_format(image_path), destination)

  

          self.pool.log_info("[DONE ] %s" % msg)

  

-     def _add_to_images(self, compose, variant, arch, type, format, path):

+     def _add_to_images(self, compose, variant, subvariant, arch, type, format, path):

          """Adds the image to images.json"""

          img = Image(compose.im)

          img.type = 'raw-xz' if type == 'appliance' else type
@@ -289,6 +295,7 @@

          img.disc_number = 1     # We don't expect multiple disks

          img.disc_count = 1

          img.bootable = True

+         img.subvariant = subvariant

          compose.im.add(variant=variant.uid, arch=arch, image=img)

  

      def _is_image(self, path):

file modified
+69 -27
@@ -4,7 +4,7 @@

  import time

  from kobo import shortcuts

  

- from pungi.util import get_variant_data, resolve_git_url, makedirs, get_mtime, get_file_size

+ from pungi.util import get_variant_data, resolve_git_url, makedirs, get_mtime, get_file_size, failable

  from pungi.phases.base import PhaseBase

  from pungi.linker import Linker

  from pungi.paths import translate_path
@@ -17,9 +17,33 @@

      """class for wrapping up koji spin-livemedia"""

      name = 'live_media'

  

+     config_options = (

+         {

+             "name": "live_media",

+             "expected_types": [dict],

+             "optional": True,

+         },

+         {

+             "name": "live_media_ksurl",

+             "expected_types": [str],

+             "optional": True,

+         },

+         {

+             "name": "live_media_target",

+             "expected_types": [str],

+             "optional": True,

+         },

+         {

+             "name": "live_media_release",

+             "expected_types": [str, type(None)],

+             "optional": True,

+         }

+     )

+ 

      def __init__(self, compose):

          super(LiveMediaPhase, self).__init__(compose)

          self.pool = ThreadPool(logger=self.compose._logger)

+         self._global_ksurl = None

  

      def skip(self):

          if super(LiveMediaPhase, self).skip():
@@ -61,10 +85,13 @@

          return sorted(arches)

  

      def _get_release(self, image_conf):

-         """If release is set explicitly to None, replace it with date and respin."""

-         if 'release' in image_conf and image_conf['release'] is None:

-             return '%s.%s' % (self.compose.compose_date, self.compose.compose_respin)

-         return image_conf.get('release', None)

+         """If release is set explicitly to None, replace it with date and respin.

+         Uses both image configuration and global config.

+         """

+         for key, conf in [('release', image_conf), ('live_media_release', self.compose.conf)]:

+             if key in conf and conf[key] is None:

+                 return self.compose.image_release

+         return image_conf.get('release', self.compose.conf.get('live_media_release'))

  

      def _get_install_tree(self, image_conf, variant):

          if 'install_tree_from' in image_conf:
@@ -80,25 +107,45 @@

              self.compose.paths.compose.os_tree('$basearch', variant, create_dir=False)

          )

  

+     @property

+     def global_ksurl(self):

+         """Get globally configured kickstart URL. It will only be resolved once."""

+         if not self._global_ksurl:

+             ksurl = self.compose.conf.get('live_media_ksurl')

+             self._global_ksurl = resolve_git_url(ksurl)

+         return self._global_ksurl

+ 

+     def _get_ksurl(self, image_conf):

+         """Get ksurl from `image_conf`. If not present, fall back to global one."""

+         if 'ksurl' in image_conf:

+             return resolve_git_url(image_conf['ksurl'])

+         return self.global_ksurl

+ 

+     def _get_config(self, image_conf, opt):

+         return image_conf.get(opt, self.compose.conf.get('live_media_' + opt))

+ 

      def run(self):

          for variant in self.compose.get_variants():

              arches = set([x for x in variant.arches if x != 'src'])

- 

              for image_conf in get_variant_data(self.compose.conf, self.name, variant):

+                 subvariant = image_conf.get('subvariant', variant.uid)

+                 name = image_conf.get(

+                     'name', "%s-%s-Live" % (self.compose.ci_base.release.short, subvariant))

                  config = {

-                     'target': image_conf['target'],

+                     'target': self._get_config(image_conf, 'target'),

                      'arches': self._get_arches(image_conf, arches),

                      'ksfile': image_conf['kickstart'],

-                     'ksurl': resolve_git_url(image_conf['ksurl']),

+                     'ksurl': self._get_ksurl(image_conf),

                      'ksversion': image_conf.get('ksversion'),

                      'scratch': image_conf.get('scratch', False),

                      'release': self._get_release(image_conf),

                      'skip_tag': image_conf.get('skip_tag'),

-                     'name': image_conf['name'],

+                     'name': name,

+                     'subvariant': subvariant,

                      'title': image_conf.get('title'),

                      'repo': self._get_repos(image_conf, variant),

                      'install_tree': self._get_install_tree(image_conf, variant),

-                     'version': image_conf['version'],

+                     'version': self._get_config(image_conf, 'version'),

                  }

                  self.pool.add(LiveMediaThread(self.pool))

                  self.pool.queue_put((self.compose, variant, config))
@@ -109,20 +156,15 @@

  class LiveMediaThread(WorkerThread):

      def process(self, item, num):

          compose, variant, config = item

+         subvariant = config.pop('subvariant')

          self.num = num

-         try:

-             self.worker(compose, variant, config)

-         except Exception as exc:

-             if not compose.can_fail(variant, '*', 'live-media'):

-                 raise

-             else:

-                 msg = ('[FAIL] live-media for variant %s failed, but going on anyway.\n%s'

-                        % (variant.uid, exc))

-                 self.pool.log_info(msg)

- 

-     def _get_log_file(self, compose, variant, config):

+         with failable(compose, variant, '*', 'live-media'):

+             self.worker(compose, variant, subvariant, config)

+ 

+     def _get_log_file(self, compose, variant, subvariant, config):

          arches = '-'.join(config['arches'])

-         return compose.paths.log.log_file(arches, 'livemedia-%s' % variant.uid)

+         return compose.paths.log.log_file(arches, 'livemedia-%s-%s'

+                                           % (variant.uid, subvariant))

  

      def _run_command(self, koji_wrapper, cmd, compose, log_file):

          time.sleep(self.num * 3)
@@ -140,16 +182,15 @@

          copy['arch'] = ','.join(copy.pop('arches', []))

          return koji_wrapper.get_live_media_cmd(copy)

  

-     def worker(self, compose, variant, config):

-         msg = 'Live media: %s (arches: %s, variant: %s)' % (config['name'],

-                                                             ' '.join(config['arches']),

-                                                             variant.uid)

+     def worker(self, compose, variant, subvariant, config):

+         msg = ('Live media: %s (arches: %s, variant: %s, subvariant: %s)'

+                % (config['name'], ' '.join(config['arches']), variant.uid, subvariant))

          self.pool.log_info('[BEGIN] %s' % msg)

  

          koji_wrapper = KojiWrapper(compose.conf['koji_profile'])

          cmd = self._get_cmd(koji_wrapper, config)

  

-         log_file = self._get_log_file(compose, variant, config)

+         log_file = self._get_log_file(compose, variant, subvariant, config)

          output = self._run_command(koji_wrapper, cmd, compose, log_file)

  

          # collect results and update manifest
@@ -192,6 +233,7 @@

              img.disc_number = 1     # We don't expect multiple disks

              img.disc_count = 1

              img.bootable = True

+             img.subvariant = subvariant

              compose.im.add(variant=variant.uid, arch=image_info['arch'], image=img)

  

          self.pool.log_info('[DONE ] %s' % msg)

file added
+109
@@ -0,0 +1,109 @@

+ # -*- coding: utf-8 -*-

+ 

+ import os

+ from kobo.threads import ThreadPool, WorkerThread

+ import re

+ 

+ from .base import ConfigGuardedPhase

+ from .. import util

+ from ..paths import translate_path

+ from ..wrappers import kojiwrapper, scm

+ 

+ 

+ class OSTreePhase(ConfigGuardedPhase):

+     name = 'ostree'

+ 

+     config_options = [

+         {

+             "name": "ostree",

+             "expected_types": [list],

+             "optional": True,

+         }

+     ]

+ 

+     def __init__(self, compose):

+         super(OSTreePhase, self).__init__(compose)

+         self.pool = ThreadPool(logger=self.compose._logger)

+ 

+     def run(self):

+         for variant in self.compose.get_variants():

+             for arch in variant.arches:

+                 for conf in util.get_arch_variant_data(self.compose.conf, self.name, arch, variant):

+                     self.pool.add(OSTreeThread(self.pool))

+                     self.pool.queue_put((self.compose, variant, arch, conf))

+ 

+         self.pool.start()

+ 

+ 

+ class OSTreeThread(WorkerThread):

+     def process(self, item, num):

+         compose, variant, arch, config = item

+         self.num = num

+         with util.failable(compose, variant, arch, 'ostree'):

+             self.worker(compose, variant, arch, config)

+ 

+     def worker(self, compose, variant, arch, config):

+         msg = 'OSTree phase for variant %s, arch %s' % (variant.uid, arch)

+         self.pool.log_info('[BEGIN] %s' % msg)

+         workdir = compose.paths.work.topdir('ostree')

+         self.logdir = compose.paths.log.topdir('%s/ostree' % arch)

+         repodir = os.path.join(workdir, 'config_repo')

+ 

+         source_variant = compose.variants[config['source_repo_from']]

+         source_repo = translate_path(compose, compose.paths.compose.repository(arch, source_variant))

+ 

+         self._clone_repo(repodir, config['config_url'], config.get('config_branch', 'master'))

+         self._tweak_mirrorlist(repodir, source_repo)

+ 

+         self._run_ostree_cmd(compose, variant, arch, config, repodir)

+ 

+         self.pool.log_info('[DONE ] %s' % msg)

+ 

+     def _run_ostree_cmd(self, compose, variant, arch, config, config_repo):

+         cmd = [

+             'pungi-make-ostree',

+             '--log-dir=%s' % os.path.join(self.logdir),

+             '--treefile=%s' % os.path.join(config_repo, config['treefile']),

+             config['ostree_repo']

+         ]

+ 

+         runroot_channel = compose.conf.get("runroot_channel", None)

+         runroot_tag = compose.conf["runroot_tag"]

+ 

+         packages = ['pungi', 'ostree', 'rpm-ostree']

+         log_file = os.path.join(self.logdir, 'runroot.log')

+         mounts = [compose.topdir, config['ostree_repo']]

+         koji = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])

+         koji_cmd = koji.get_runroot_cmd(runroot_tag, arch, cmd,

+                                         channel=runroot_channel,

+                                         use_shell=True, task_id=True,

+                                         packages=packages, mounts=mounts)

+         output = koji.run_runroot_cmd(koji_cmd, log_file=log_file)

+         if output["retcode"] != 0:

+             raise RuntimeError("Runroot task failed: %s. See %s for more details."

+                                % (output["task_id"], log_file))

+ 

+     def _clone_repo(self, repodir, url, branch):

+         scm.get_dir_from_scm({'scm': 'git', 'repo': url, 'branch': branch, 'dir': '.'},

+                              repodir, logger=self.pool._logger)

+ 

+     def _tweak_mirrorlist(self, repodir, source_repo):

+         for file in os.listdir(repodir):

+             if file.endswith('.repo'):

+                 tweak_file(os.path.join(repodir, file), source_repo)

+ 

+ 

+ def tweak_file(path, source_repo):

+     """

+     Ensure a given .repo file points to `source_repo`.

+ 

+     This function replaces all lines starting with `mirrorlist`, `metalink` or

+     `baseurl` with `baseurl` set to requested repository.

+     """

+     with open(path, 'r') as f:

+         contents = f.read()

+     replacement = 'baseurl=%s' % source_repo

+     exp = re.compile(r'^(mirrorlist|metalink|baseurl)=.*$', re.MULTILINE)

+     contents = exp.sub(replacement, contents)

+     with open(path, 'w') as f:

+         f.write(contents)

@@ -0,0 +1,171 @@

+ # -*- coding: utf-8 -*-

+ 

+ import os

+ from kobo.threads import ThreadPool, WorkerThread

+ import shutil

+ from productmd import images

+ import pipes

+ from kobo import shortcuts

+ 

+ from .base import ConfigGuardedPhase

+ from .. import util

+ from ..paths import translate_path

+ from ..wrappers import kojiwrapper, iso, lorax, scm

+ 

+ 

+ class OstreeInstallerPhase(ConfigGuardedPhase):

+     name = 'ostree_installer'

+ 

+     config_options = [

+         {

+             "name": "ostree_installer",

+             "expected_types": [list],

+             "optional": True,

+         }

+     ]

+ 

+     def __init__(self, compose):

+         super(OstreeInstallerPhase, self).__init__(compose)

+         self.pool = ThreadPool(logger=self.compose._logger)

+ 

+     def run(self):

+         for variant in self.compose.get_variants():

+             for arch in variant.arches:

+                 for conf in util.get_arch_variant_data(self.compose.conf, self.name, arch, variant):

+                     self.pool.add(OstreeInstallerThread(self.pool))

+                     self.pool.queue_put((self.compose, variant, arch, conf))

+ 

+         self.pool.start()

+ 

+ 

+ class OstreeInstallerThread(WorkerThread):

+     def process(self, item, num):

+         compose, variant, arch, config = item

+         self.num = num

+         with util.failable(compose, variant, arch, 'ostree-installer', 'Ostree installer'):

+             self.worker(compose, variant, arch, config)

+ 

+     def worker(self, compose, variant, arch, config):

+         msg = 'Ostree phase for variant %s, arch %s' % (variant.uid, arch)

+         self.pool.log_info('[BEGIN] %s' % msg)

+         self.logdir = compose.paths.log.topdir('%s/ostree_installer' % arch)

+ 

+         source_repo = self._get_source_repo(compose, arch, config['source_repo_from'])

+         output_dir = os.path.join(compose.paths.work.topdir(arch), variant.uid, 'ostree_installer')

+         util.makedirs(os.path.dirname(output_dir))

+ 

+         self.template_dir = os.path.join(compose.paths.work.topdir(arch), variant.uid, 'lorax_templates')

+         self._clone_templates(config.get('template_repo'), config.get('template_branch'))

+ 

+         self._run_ostree_cmd(compose, variant, arch, config, source_repo, output_dir)

+ 

+         disc_type = compose.conf.get('disc_types', {}).get('dvd', 'dvd')

+         filename = compose.get_image_name(arch, variant, disc_type=disc_type)

+         self._copy_image(compose, variant, arch, filename, output_dir)

+         self._add_to_manifest(compose, variant, arch, filename)

+         self.pool.log_info('[DONE ] %s' % msg)

+ 

+     def _get_source_repo(self, compose, arch, source):

+         """

+         If `source` is a URL, return it as-is (possibly replacing $arch with

+         actual arch. Otherwise treat is a a variant name and return path to

+         repo in that variant.

+         """

+         if '://' in source:

+             return source.replace('$arch', arch)

+         source_variant = compose.variants[source]

+         return translate_path(

+             compose, compose.paths.compose.repository(arch, source_variant, create_dir=False))

+ 

+     def _clone_templates(self, url, branch='master'):

+         if not url:

+             self.template_dir = None

+             return

+         scm.get_dir_from_scm({'scm': 'git', 'repo': url, 'branch': branch, 'dir': '.'},

+                              self.template_dir, logger=self.pool._logger)

+ 

+     def _get_release(self, compose, config):

+         if 'release' in config and config['release'] is None:

+             return compose.image_release

+         return config.get('release', None)

+ 

+     def _copy_image(self, compose, variant, arch, filename, output_dir):

+         iso_path = compose.paths.compose.iso_path(arch, variant, filename)

+         boot_iso = os.path.join(output_dir, 'images', 'boot.iso')

+ 

+         shortcuts.run('cp -av %s/* %s/' %

+                       (pipes.quote(output_dir), pipes.quote(os.path.dirname(iso_path))))

+         try:

+             os.link(boot_iso, iso_path)

+         except OSError:

+             shutil.copy2(boot_iso, iso_path)

+ 

+     def _add_to_manifest(self, compose, variant, arch, filename):

+         full_iso_path = compose.paths.compose.iso_path(arch, variant, filename)

+         iso_path = compose.paths.compose.iso_path(arch, variant, filename, relative=True)

+         iso_wrapper = iso.IsoWrapper()

+         implant_md5 = iso_wrapper.get_implanted_md5(full_iso_path)

+ 

+         img = images.Image(compose.im)

+         img.path = iso_path

+         img.mtime = util.get_mtime(full_iso_path)

+         img.size = util.get_file_size(full_iso_path)

+         img.arch = arch

+         img.type = "boot"

+         img.format = "iso"

+         img.disc_number = 1

+         img.disc_count = 1

+         img.bootable = True

+         img.subvariant = variant.name

+         img.implant_md5 = implant_md5

+         try:

+             img.volume_id = iso_wrapper.get_volume_id(full_iso_path)

+         except RuntimeError:

+             pass

+         compose.im.add(variant.uid, arch, img)

+ 

+     def _get_templates(self, config, key):

+         """Retrieve all templates from configuration and make sure the paths

+         are absolute. Raises RuntimeError if template repo is needed but not

+         configured.

+         """

+         templates = []

+         for template in config.get(key, []):

+             if template[0] != '/':

+                 if not self.template_dir:

+                     raise RuntimeError('Relative path to template without setting template_repo.')

+                 template = os.path.join(self.template_dir, template)

+             templates.append(template)

+         return templates

+ 

+     def _run_ostree_cmd(self, compose, variant, arch, config, source_repo, output_dir):

+         lorax_wrapper = lorax.LoraxWrapper()

+         cmd = lorax_wrapper.get_lorax_cmd(

+             compose.conf['release_name'],

+             compose.conf["release_version"],

+             self._get_release(compose, config),

+             repo_baseurl=source_repo,

+             output_dir=output_dir,

+             variant=variant.uid,

+             nomacboot=True,

+             buildinstallpackages=config.get('installpkgs'),

+             add_template=self._get_templates(config, 'add_template'),

+             add_arch_template=self._get_templates(config, 'add_arch_template'),

+             add_template_var=config.get('add_template_var'),

+             add_arch_template_var=config.get('add_arch_template_var')

+         )

+ 

+         runroot_channel = compose.conf.get("runroot_channel", None)

+         runroot_tag = compose.conf["runroot_tag"]

+ 

+         packages = ['pungi', 'lorax', 'ostree']

+         log_file = os.path.join(self.logdir, 'runroot.log')

+         koji = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])

+         koji_cmd = koji.get_runroot_cmd(runroot_tag, arch, cmd,

+                                         channel=runroot_channel,

+                                         use_shell=True, task_id=True,

+                                         packages=packages, mounts=[compose.topdir])

+         output = koji.run_runroot_cmd(koji_cmd, log_file=log_file)

+         if output["retcode"] != 0:

+             raise RuntimeError("Runroot task failed: %s. See %s for more details."

+                                % (output["task_id"], log_file))

file modified
+2 -45
@@ -17,12 +17,12 @@

  

  import os

  

- from kobo.shortcuts import run, force_list, relative_path

+ from kobo.shortcuts import run, relative_path

  

  import pungi.phases.pkgset.pkgsets

  from pungi.arch import get_valid_arches

  from pungi.wrappers.createrepo import CreaterepoWrapper

- from pungi.util import is_arch_multilib

+ from pungi.util import is_arch_multilib, find_old_compose

  

  

  # TODO: per arch?
@@ -92,46 +92,3 @@

      cmd = repo.get_createrepo_cmd(path_prefix, update=True, database=True, skip_stat=True, pkglist=compose.paths.work.package_list(arch=arch), outputdir=repo_dir, baseurl="file://%s" % path_prefix, workers=5, groupfile=comps_path, update_md_path=repo_dir_global, checksum=createrepo_checksum)

      run(cmd, logfile=compose.paths.log.log_file(arch, "arch_repo"), show_cmd=True)

      compose.log_info("[DONE ] %s" % msg)

- 

- 

- def find_old_compose(old_compose_dirs, release_short, release_version, base_product_short=None, base_product_version=None):

-     composes = []

- 

-     for compose_dir in force_list(old_compose_dirs):

-         if not os.path.isdir(compose_dir):

-             continue

- 

-         # get all finished composes

-         for i in os.listdir(compose_dir):

-             # TODO: read .composeinfo

- 

-             pattern = "%s-%s" % (release_short, release_version)

-             if base_product_short:

-                 pattern += "-%s" % base_product_short

-             if base_product_version:

-                 pattern += "-%s" % base_product_version

- 

-             if not i.startswith(pattern):

-                 continue

- 

-             path = os.path.join(compose_dir, i)

-             if not os.path.isdir(path):

-                 continue

- 

-             if os.path.islink(path):

-                 continue

- 

-             status_path = os.path.join(path, "STATUS")

-             if not os.path.isfile(status_path):

-                 continue

- 

-             try:

-                 if open(status_path, "r").read().strip() in ("FINISHED", "DOOMED"):

-                     composes.append((i, os.path.abspath(path)))

-             except:

-                 continue

- 

-     if not composes:

-         return None

- 

-     return sorted(composes)[-1][1]

@@ -202,7 +202,7 @@

  

      def __getstate__(self):

          result = self.__dict__.copy()

-         result["koji_profile"] = self.koji_wrapper.koji_module.config.profile

+         result["koji_profile"] = self.koji_wrapper.profile

          del result["koji_wrapper"]

          del result["_logger"]

          return result
@@ -225,12 +225,14 @@

          rpm_path = None

          found = False

          pathinfo = self.koji_wrapper.koji_module.pathinfo

+         paths = []

          for sigkey in self.sigkey_ordering:

              if sigkey is None:

                  # we're looking for *signed* copies here

                  continue

              sigkey = sigkey.lower()

              rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.signed(rpm_info, sigkey))

+             paths.append(rpm_path)

              if os.path.isfile(rpm_path):

                  found = True

                  break
@@ -239,14 +241,17 @@

              if None in self.sigkey_ordering:

                  # use an unsigned copy (if allowed)

                  rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))

+                 paths.append(rpm_path)

                  if os.path.isfile(rpm_path):

                      found = True

              else:

                  # or raise an exception

-                 raise RuntimeError("RPM %s not found for sigs: %s" % (rpm_info, self.sigkey_ordering))

+                 raise RuntimeError("RPM %s not found for sigs: %s. Paths checked: %s"

+                                    % (rpm_info, self.sigkey_ordering, paths))

  

          if not found:

-             raise RuntimeError("Package not found: %s" % rpm_info)

+             raise RuntimeError("Package not found: %s. Paths checked: %s"

+                                % (rpm_info, paths))

          return rpm_path

  

      def populate(self, tag, event=None, inherit=True):
@@ -258,7 +263,7 @@

  

          msg = "Getting latest RPMs (tag: %s, event: %s, inherit: %s)" % (tag, event, inherit)

          self.log_info("[BEGIN] %s" % msg)

-         rpms, builds = self.get_latest_rpms(tag, event)

+         rpms, builds = self.get_latest_rpms(tag, event, inherit=inherit)

  

          builds_by_id = {}

          for build_info in builds:

file modified
+2
@@ -49,6 +49,8 @@

          arches = get_valid_arches(arch, is_multilib)

          all_arches.update(arches)

          for variant in compose.get_variants(arch=arch):

+             if variant.is_empty:

+                 continue

              lookaside = {}

              if variant.parent:

                  repo_id = "repoclosure-%s.%s" % (variant.parent.uid, arch)

file modified
+121 -15
@@ -24,8 +24,10 @@

  import pipes

  import re

  import urlparse

+ import contextlib

+ import traceback

  

- from kobo.shortcuts import run

+ from kobo.shortcuts import run, force_list

  from productmd.common import get_major_version

  

  
@@ -136,13 +138,11 @@

  

  

  def makedirs(path, mode=0o775):

-     mask = os.umask(0)

      try:

          os.makedirs(path, mode=mode)

      except OSError as ex:

          if ex.errno != errno.EEXIST:

              raise

-     os.umask(mask)

  

  

  def rmtree(path, ignore_errors=False, onerror=None):
@@ -220,9 +220,18 @@

      return bool(get_arch_variant_data(conf, 'multilib', arch, None))

  

  

+ def _get_git_ref(fragment):

+     if fragment == 'HEAD':

+         return fragment

+     if fragment.startswith('origin/'):

+         branch = fragment.split('/', 1)[1]

+         return 'refs/heads/' + branch

+     return None

+ 

+ 

  def resolve_git_url(url):

-     """Given a url to a Git repo specifying HEAD as a ref, replace that

-     specifier with actual SHA1 of the commit.

+     """Given a url to a Git repo specifying HEAD or origin/<branch> as a ref,

+     replace that specifier with actual SHA1 of the commit.

  

      Otherwise, the original URL will be returned.

  
@@ -230,11 +239,16 @@

      run git command.

      """

      r = urlparse.urlsplit(url)

-     if r.fragment != 'HEAD':

+     ref = _get_git_ref(r.fragment)

+     if not ref:

          return url

  

-     baseurl = urlparse.urlunsplit((r.scheme, r.netloc, r.path, '', ''))

-     _, output = run(['git', 'ls-remote', baseurl, r.fragment])

+     # Remove git+ prefix from scheme if present. This is for resolving only,

+     # the final result must use the original scheme.

+     scheme = r.scheme.replace('git+', '')

+ 

+     baseurl = urlparse.urlunsplit((scheme, r.netloc, r.path, '', ''))

+     _, output = run(['git', 'ls-remote', baseurl, ref])

  

      lines = [line for line in output.split('\n') if line]

      if len(lines) != 1:
@@ -360,13 +374,14 @@

          if not variant_uid and "%(variant)s" in i:

              continue

          try:

-             volid = i % compose.get_format_substs(variant=variant_uid,

-                                                   release_short=release_short,

-                                                   version=release_version,

-                                                   arch=arch,

-                                                   disc_type=disc_type or '',

-                                                   base_product_short=base_product_short,

-                                                   base_product_version=base_product_version)

+             volid = i % get_format_substs(compose,

+                                           variant=variant_uid,

+                                           release_short=release_short,

+                                           version=release_version,

+                                           arch=arch,

+                                           disc_type=disc_type or '',

+                                           base_product_short=base_product_short,

+                                           base_product_version=base_product_version)

          except KeyError as err:

              raise RuntimeError('Failed to create volume id: unknown format element: %s' % err.message)

          volid = _apply_substitutions(compose, volid)
@@ -391,3 +406,94 @@

  

  def get_file_size(path):

      return os.path.getsize(path)

+ 

+ 

+ def find_old_compose(old_compose_dirs, release_short, release_version,

+                      base_product_short=None, base_product_version=None):

+     composes = []

+ 

+     for compose_dir in force_list(old_compose_dirs):

+         if not os.path.isdir(compose_dir):

+             continue

+ 

+         # get all finished composes

+         for i in os.listdir(compose_dir):

+             # TODO: read .composeinfo

+ 

+             pattern = "%s-%s" % (release_short, release_version)

+             if base_product_short:

+                 pattern += "-%s" % base_product_short

+             if base_product_version:

+                 pattern += "-%s" % base_product_version

+ 

+             if not i.startswith(pattern):

+                 continue

+ 

+             path = os.path.join(compose_dir, i)

+             if not os.path.isdir(path):

+                 continue

+ 

+             if os.path.islink(path):

+                 continue

+ 

+             status_path = os.path.join(path, "STATUS")

+             if not os.path.isfile(status_path):

+                 continue

+ 

+             try:

+                 with open(status_path, 'r') as f:

+                     if f.read().strip() in ("FINISHED", "FINISHED_INCOMPLETE", "DOOMED"):

+                         composes.append((i, os.path.abspath(path)))

+             except:

+                 continue

+ 

+     if not composes:

+         return None

+ 

+     return sorted(composes)[-1][1]

+ 

+ 

+ def process_args(fmt, args):

+     """Given a list of arguments, format each value with the format string.

+ 

+     >>> process_args('--opt=%s', ['foo', 'bar'])

+     ['--opt=foo', '--opt=bar']

+     """

+     return [fmt % val for val in force_list(args or [])]

+ 

+ 

+ @contextlib.contextmanager

+ def failable(compose, variant, arch, deliverable, msg=None):

+     """If a deliverable can fail, log a message and go on as if it succeeded."""

+     msg = msg or deliverable.capitalize()

+     try:

+         yield

+     except Exception as exc:

+         if not compose.can_fail(variant, arch, deliverable):

+             raise

+         else:

+             compose.log_info('[FAIL] %s (variant %s, arch %s) failed, but going on anyway.'

+                              % (msg, variant.uid if variant else 'None', arch))

+             compose.log_info(str(exc))

+             tb = traceback.format_exc()

+             compose.log_debug(tb)

+ 

+ 

+ def get_format_substs(compose, **kwargs):

+     """Return a dict of basic format substitutions.

+ 

+     Any kwargs will be added as well.

+     """

+     substs = {

+         'compose_id': compose.compose_id,

+         'release_short': compose.ci_base.release.short,

+         'version': compose.ci_base.release.version,

+         'date': compose.compose_date,

+         'respin': compose.compose_respin,

+         'type': compose.compose_type,

+         'type_suffix': compose.compose_type_suffix,

+         'label': compose.compose_label,

+         'label_major_version': compose.compose_label_major_version,

+     }

+     substs.update(kwargs)

+     return substs

file modified
+18 -23
@@ -28,15 +28,15 @@

              self.mergerepo = "mergerepo"

          self.modifyrepo = "modifyrepo"

  

-     def get_createrepo_cmd(self, directory, baseurl=None, outputdir=None, excludes=None, pkglist=None, groupfile=None, cachedir=None,

-                            update=True, update_md_path=None, skip_stat=False, checkts=False, split=False, pretty=True, database=True, checksum=None,

-                            unique_md_filenames=True, distro=None, content=None, repo=None, revision=None, deltas=False, oldpackagedirs=None,

-                            num_deltas=None, workers=None):

+     def get_createrepo_cmd(self, directory, baseurl=None, outputdir=None, excludes=None,

+                            pkglist=None, groupfile=None, cachedir=None, update=True,

+                            update_md_path=None, skip_stat=False, checkts=False, split=False,

+                            pretty=True, database=True, checksum=None, unique_md_filenames=True,

+                            distro=None, content=None, repo=None, revision=None, deltas=False,

+                            oldpackagedirs=None, num_deltas=None, workers=None):

          # groupfile = /path/to/comps.xml

  

-         cmd = [self.createrepo]

- 

-         cmd.append(directory)

+         cmd = [self.createrepo, directory, '--verbose']

  

          if baseurl:

              cmd.append("--baseurl=%s" % baseurl)
@@ -44,9 +44,8 @@

          if outputdir:

              cmd.append("--outputdir=%s" % outputdir)

  

-         if excludes:

-             for i in force_list(excludes):

-                 cmd.append("--excludes=%s" % i)

+         for i in force_list(excludes or []):

+             cmd.append("--excludes=%s" % i)

  

          if pkglist:

              cmd.append("--pkglist=%s" % pkglist)
@@ -91,27 +90,23 @@

          else:

              cmd.append("--simple-md-filenames")

  

-         if distro:

-             for i in force_list(distro):

-                 cmd.append("--distro=%s" % i)

+         for i in force_list(distro or []):

+             cmd.append("--distro=%s" % i)

  

-         if content:

-             for i in force_list(content):

-                 cmd.append("--content=%s" % i)

+         for i in force_list(content or []):

+             cmd.append("--content=%s" % i)

  

-         if repo:

-             for i in force_list(repo):

-                 cmd.append("--repo=%s" % i)

+         for i in force_list(repo or []):

+             cmd.append("--repo=%s" % i)

  

          if revision:

              cmd.append("--revision=%s" % revision)

  

          if deltas:

-             cmd.append("--deltas=%s" % deltas)

+             cmd.append("--deltas")

  

-         if oldpackagedirs:

-             for i in force_list(oldpackagedirs):

-                 cmd.append("--oldpackagedirs=%s" % i)

+         for i in force_list(oldpackagedirs or []):

+             cmd.append("--oldpackagedirs=%s" % i)

  

          if num_deltas:

              cmd.append("--num-deltas=%d" % int(num_deltas))

file modified
+9 -1
@@ -83,7 +83,6 @@

                  '-allow-multidot',

                  '-chrp-boot',

                  "-map", os.path.join(createfrom, 'mapping'),  # -map %s/ppc/mapping

-                 "-magic", os.path.join(createfrom, 'magic'),  # -magic %s/ppc/magic

                  '-hfs-bless', "/ppc/mac",  # must be the last

              ]

              return result
@@ -206,6 +205,15 @@

          result = line.rsplit(":")[-1].strip()

          return result

  

+     def get_isohybrid_cmd(self, iso_path, arch):

+         # isohybrid is in syslinux which is x86 only

+         cmd = ["/usr/bin/isohybrid"]

+         # uefi is only supported on x86_64

+         if arch == "x86_64":

+             cmd.append("--uefi")

+         cmd.append(iso_path)

+         return cmd

+ 

      def get_manifest_cmd(self, iso_name):

          return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (pipes.quote(iso_name), pipes.quote(iso_name))

  

file modified
+41 -11
@@ -18,6 +18,8 @@

  import os

  import pipes

  import re

+ import time

+ import threading

  

  import koji

  import rpmUtils.arch
@@ -26,12 +28,15 @@

  

  

  class KojiWrapper(object):

+     lock = threading.Lock()

+ 

      def __init__(self, profile):

          self.profile = profile

          # assumption: profile name equals executable name (it's a symlink -> koji)

          self.executable = self.profile.replace("_", "-")

-         self.koji_module = koji.get_profile_module(profile)

-         self.koji_proxy = koji.ClientSession(self.koji_module.config.server)

+         with self.lock:

+             self.koji_module = koji.get_profile_module(profile)

+             self.koji_proxy = koji.ClientSession(self.koji_module.config.server)

  

      def get_runroot_cmd(self, target, arch, command, quiet=False, use_shell=True, channel=None, packages=None, mounts=None, weight=None, task_id=True):

          cmd = [self.executable, "runroot"]
@@ -85,7 +90,7 @@

          contains the id, it will be captured and returned.

          """

          task_id = None

-         retcode, output = run(command, can_fail=True, logfile=log_file)

+         retcode, output = run(command, can_fail=True, logfile=log_file, show_cmd=True)

          if "--task-id" in command:

              first_line = output.splitlines()[0]

              if re.match(r'^\d+$', first_line):
@@ -234,28 +239,53 @@

  

          return cmd

  

-     def run_blocking_cmd(self, command, log_file=None):

+     def _has_connection_error(self, output):

+         """Checks if output indicates connection error."""

+         return re.search('error: failed to connect\n$', output)

+ 

+     def _wait_for_task(self, task_id, logfile=None, max_retries=None):

+         """Tries to wait for a task to finish. On connection error it will

+         retry with `watch-task` command.

+         """

+         cmd = [self.executable, 'watch-task', str(task_id)]

+         attempt = 0

+ 

+         while True:

+             retcode, output = run(cmd, can_fail=True, logfile=logfile)

+ 

+             if retcode == 0 or not self._has_connection_error(output):

+                 # Task finished for reason other than connection error.

+                 return retcode, output

+ 

+             attempt += 1

+             if max_retries and attempt >= max_retries:

+                 break

+             time.sleep(attempt * 10)

+ 

+         raise RuntimeError('Failed to wait for task %s. Too many connection errors.' % task_id)

+ 

+     def run_blocking_cmd(self, command, log_file=None, max_retries=None):

          """

          Run a blocking koji command. Returns a dict with output of the command,

          its exit code and parsed task id. This method will block until the

          command finishes.

          """

-         try:

-             retcode, output = run(command, can_fail=True, logfile=log_file)

-         except RuntimeError, e:

-             raise RuntimeError("%s. %s failed with '%s'" % (e, command, output))

+         retcode, output = run(command, can_fail=True, logfile=log_file)

  

          match = re.search(r"Created task: (\d+)", output)

          if not match:

              raise RuntimeError("Could not find task ID in output. Command '%s' returned '%s'."

                                 % (" ".join(command), output))

+         task_id = int(match.groups()[0])

+ 

+         if retcode != 0 and self._has_connection_error(output):

+             retcode, output = self._wait_for_task(task_id, logfile=log_file, max_retries=max_retries)

  

-         result = {

+         return {

              "retcode": retcode,

              "output": output,

-             "task_id": int(match.groups()[0]),

+             "task_id": task_id,

          }

-         return result

  

      def get_image_paths(self, task_id):

          """

file modified
+9 -3
@@ -18,12 +18,15 @@

  import os

  

  from kobo.shortcuts import force_list

+ from ..util import process_args

  

  

  class LoraxWrapper(object):

      def get_lorax_cmd(self, product, version, release, repo_baseurl, output_dir,

                        variant=None, bugurl=None, nomacboot=False, noupgrade=False,

-                       is_final=False, buildarch=None, volid=None, buildinstallpackages=None):

+                       is_final=False, buildarch=None, volid=None, buildinstallpackages=None,

+                       add_template=None, add_arch_template=None,

+                       add_template_var=None, add_arch_template_var=None):

          cmd = ["lorax"]

          cmd.append("--product=%s" % product)

          cmd.append("--version=%s" % version)
@@ -55,8 +58,11 @@

          if volid:

              cmd.append("--volid=%s" % volid)

  

-         if buildinstallpackages:

-             cmd.extend(["--installpkgs=%s" % package for package in buildinstallpackages])

+         cmd.extend(process_args('--installpkgs=%s', buildinstallpackages))

+         cmd.extend(process_args('--add-template=%s', add_template))

+         cmd.extend(process_args('--add-arch-template=%s', add_arch_template))

+         cmd.extend(process_args('--add-template-var=%s', add_template_var))

+         cmd.extend(process_args('--add-arch-template-var=%s', add_arch_template_var))

  

          output_dir = os.path.abspath(output_dir)

          cmd.append(output_dir)

file modified
+3 -6
@@ -15,10 +15,11 @@

  # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.

  

  

- import errno

  import os

  import re

  

+ from .. import util

+ 

  

  PACKAGES_RE = {

      "rpm": re.compile(r"^RPM(\((?P<flags>[^\)]+)\))?: (?:file://)?(?P<path>/?[^ ]+)$"),
@@ -43,11 +44,7 @@

          ks_path = os.path.abspath(ks_path)

  

          ks_dir = os.path.dirname(ks_path)

-         try:

-             os.makedirs(ks_dir)

-         except OSError as ex:

-             if ex.errno != errno.EEXIST:

-                 raise

+         util.makedirs(ks_dir)

  

          kickstart = open(ks_path, "w")

  

file modified
+55 -21
@@ -29,25 +29,36 @@

          return (a > b) - (a < b)

  

  

- VARIANTS_DTD = "/usr/share/pungi/variants.dtd"

- if not os.path.isfile(VARIANTS_DTD):

-     DEVEL_VARIANTS_DTD = os.path.normpath(os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "..", "share", "variants.dtd")))

-     msg = "Variants DTD not found: %s" % VARIANTS_DTD

-     if os.path.isfile(DEVEL_VARIANTS_DTD):

-         sys.stderr.write("%s\n" % msg)

-         sys.stderr.write("Using alternative DTD: %s\n" % DEVEL_VARIANTS_DTD)

-         VARIANTS_DTD = DEVEL_VARIANTS_DTD

-     else:

-         raise RuntimeError(msg)

+ def get_variants_dtd(logger=None):

+     """

+     Find the DTD for variants file. First look into the system directory, and

+     fall back to local directory.

+     """

+     variants_dtd = "/usr/share/pungi/variants.dtd"

+     if not os.path.isfile(variants_dtd):

+         devel_variants_dtd = os.path.normpath(os.path.realpath(

+             os.path.join(os.path.dirname(__file__), "..", "..", "share", "variants.dtd")))

+         msg = "Variants DTD not found: %s" % variants_dtd

+         if os.path.isfile(devel_variants_dtd):

+             if logger:

+                 logger.warning("%s", msg)

+                 logger.warning("Using alternative DTD: %s", devel_variants_dtd)

+             variants_dtd = devel_variants_dtd

+         else:

+             raise RuntimeError(msg)

+     return variants_dtd

  

  

  class VariantsXmlParser(object):

-     def __init__(self, file_obj, tree_arches=None):

+     def __init__(self, file_obj, tree_arches=None, tree_variants=None, logger=None):

          self.tree = lxml.etree.parse(file_obj)

-         self.dtd = lxml.etree.DTD(open(VARIANTS_DTD, "r"))

+         with open(get_variants_dtd(logger), 'r') as f:

+             self.dtd = lxml.etree.DTD(f)

          self.addons = {}

          self.layered_products = {}

          self.tree_arches = tree_arches

+         self.tree_variants = tree_variants

+         self.logger = logger

          self.validate()

  

      def _is_true(self, value):
@@ -62,7 +73,7 @@

              errors = [str(i) for i in self.dtd.error_log.filter_from_errors()]

              raise ValueError("Variants XML doesn't validate:\n%s" % "\n".join(errors))

  

-     def parse_variant_node(self, variant_node):

+     def parse_variant_node(self, variant_node, parent=None):

          variant_dict = {

              "id": str(variant_node.attrib["id"]),

              "name": str(variant_node.attrib["name"]),
@@ -72,9 +83,14 @@

              "environments": [],

              "buildinstallpackages": [],

              "is_empty": bool(variant_node.attrib.get("is_empty", False)),

+             "parent": parent,

          }

          if self.tree_arches:

              variant_dict["arches"] = [i for i in variant_dict["arches"] if i in self.tree_arches]

+         if not variant_dict["arches"]:

+             if self.logger:

+                 self.logger.info('Excluding variant %s: all its arches are filtered.' % variant_dict['id'])

+             return None

  

          for grouplist_node in variant_node.xpath("groups"):

              for group_node in grouplist_node.xpath("group"):
@@ -121,19 +137,21 @@

  

          contains_optional = False

          for child_node in variant_node.xpath("variants/variant"):

-             child_variant = self.parse_variant_node(child_node)

-             variant.add_variant(child_variant)

+             child_variant = self.parse_variant_node(child_node, variant)

+             if not self.add_child(child_variant, variant):

+                 continue

              if child_variant.type == "optional":

                  contains_optional = True

  

          has_optional = self._is_true(variant_node.attrib.get("has_optional", "false"))

          if has_optional and not contains_optional:

-             optional = Variant(id="optional", name="optional", type="optional", arches=variant.arches, groups=[])

-             variant.add_variant(optional)

+             optional = Variant(id="optional", name="optional", type="optional",

+                                arches=variant.arches, groups=[], parent=variant)

+             self.add_child(optional, variant)

  

          for ref in variant_node.xpath("variants/ref/@id"):

-             child_variant = self.parse_variant_node(self.addons[ref])

-             variant.add_variant(child_variant)

+             child_variant = self.parse_variant_node(self.addons[ref], variant)

+             self.add_child(child_variant, variant)

  

  # XXX: top-level optional

  #    for ref in variant_node.xpath("variants/ref/@id"):
@@ -141,6 +159,19 @@

  

          return variant

  

+     def _is_excluded(self, variant):

+         if self.tree_variants and variant.uid not in self.tree_variants:

+             if self.logger:

+                 self.logger.info('Excluding variant %s: filtered by configuration.' % variant)

+             return True

+         return False

+ 

+     def add_child(self, child, parent):

+         if not child or self._is_excluded(child):

+             return None

+         parent.add_variant(child)

+         return child

+ 

      def parse(self):

          # we allow top-level addon definitions which can be referenced in variants

          for variant_node in self.tree.xpath("/variants/variant[@type='addon']"):
@@ -154,6 +185,8 @@

          result = {}

          for variant_node in self.tree.xpath("/variants/variant[@type='variant']"):

              variant = self.parse_variant_node(variant_node)

+             if not variant or self._is_excluded(variant):

+                 continue

              result[variant.id] = variant

  

          for variant_node in self.tree.xpath("/variants/variant[not(@type='variant' or @type='addon' or @type='layered-product')]"):
@@ -163,7 +196,8 @@

  

  

  class Variant(object):

-     def __init__(self, id, name, type, arches, groups, environments=None, buildinstallpackages=None, is_empty=False):

+     def __init__(self, id, name, type, arches, groups, environments=None,

+                  buildinstallpackages=None, is_empty=False, parent=None):

          if not id.isalnum():

              raise ValueError("Variant ID must contain only alphanumeric characters: %s" % id)

  
@@ -178,7 +212,7 @@

          self.environments = sorted(copy.deepcopy(environments), lambda x, y: cmp(x["name"], y["name"]))

          self.buildinstallpackages = sorted(buildinstallpackages)

          self.variants = {}

-         self.parent = None

+         self.parent = parent

          self.is_empty = is_empty

  

      def __getitem__(self, name):

file modified
+7 -3
@@ -25,7 +25,7 @@

  

  setup(

      name            = "pungi",

-     version         = "4.0.5",  # make sure it matches with pungi.__version__

+     version         = "4.0.17",  # make sure it matches with pungi.__version__

      description     = "Distribution compose tool",

      url             = "https://pagure.io/pungi",

      author          = "Dennis Gilmore",
@@ -34,10 +34,14 @@

  

      packages        = packages,

      scripts         = [

-         'bin/pungi',

-         'bin/pungi-koji',

          'bin/comps_filter',

+         'bin/pungi',

+         'bin/pungi-config-validate',

+         'bin/pungi-createiso',

          'bin/pungi-fedmsg-notification',

+         'bin/pungi-koji',

+         'bin/pungi-make-ostree',

+         'bin/pungi-pylorax-find-templates',

      ],

      data_files      = [

          ('/usr/share/pungi', glob.glob('share/*.xsl')),

@@ -109,3 +109,5 @@

          'src': True

      }),

  ]

+ 

+ create_jigdo = False

@@ -0,0 +1,55 @@

+ {

+     "header": {

+         "version": "1.0"

+     },

+     "payload": {

+         "compose": {

+             "date": "20151203",

+             "id": "Test-20151203.0.t",

+             "respin": 0,

+             "type": "test"

+         },

+         "rpms": {

+             "Server": {

+                 "x86_64": {

+                     "bash-0:4.3.30-2.fc21.src": {

+                         "bash-0:4.3.30-2.fc21.x86_64": {

+                             "path": "Server/x86_64/os/Packages/b/bash-4.3.30-2.fc21.x86_64.rpm",

+                             "sigkey": null,

+                             "category": "binary"

+                         },

+                         "bash-0:4.3.30-2.fc21.src": {

+                             "path": "Server/source/SRPMS/b/bash-4.3.30-2.fc21.src.rpm",

+                             "sigkey": null,

+                             "category": "source"

+                         },

+                         "bash-debuginfo-0:4.3.30-2.fc21.x86_64": {

+                             "path": "Server/x86_64/debug/tree/Packages/b/bash-debuginfo-4.3.30-2.fc21.x86_64.rpm",

+                             "sigkey": null,

+                             "category": "debug"

+                         }

+                     }

+                 },

+                 "amd64": {

+                     "bash-0:4.3.30-2.fc21.src": {

+                         "bash-0:4.3.30-2.fc21.amd64": {

+                             "path": "Server/amd64/os/Packages/b/bash-4.3.30-2.fc21.amd64.rpm",

+                             "sigkey": null,

+                             "category": "binary"

+                         },

+                         "bash-0:4.3.30-2.fc21.src": {

+                             "path": "Server/source/SRPMS/b/bash-4.3.30-2.fc21.src.rpm",

+                             "sigkey": null,

+                             "category": "source"

+                         },

+                         "bash-debuginfo-0:4.3.30-2.fc21.amd64": {

+                             "path": "Server/amd64/debug/tree/Packages/b/bash-debuginfo-4.3.30-2.fc21.amd64.rpm",

+                             "sigkey": null,

+                             "category": "debug"

+                         }

+                     }

+                 }

+             }

+         }

+     }

+ }

@@ -0,0 +1,82 @@

+ <?xml version="1.0" encoding="UTF-8"?>

+ <!DOCTYPE variants PUBLIC "-//Red Hat, Inc.//DTD Variants info//EN" "variants.dtd">

+ 

+ <variants>

+   <variant id="ResilientStorage" name="Resilient Storage" type="addon">

+     <arches>

+       <arch>x86_64</arch>

+     </arches>

+     <groups>

+       <group default="true">resilient-storage</group>

+     </groups>

+   </variant>

+ 

+   <variant id="Live" name="Live" type="variant" is_empty="true">

+     <arches>

+       <arch>x86_64</arch>

+     </arches>

+   </variant>

+ 

+   <variant id="Crashy" name="Crashy" type="variant">

+     <arches>

+       <arch>ppc64le</arch>

+     </arches>

+   </variant>

+ 

+   <variant id="Gluster" name="Gluster Layered Product" type="layered-product">

+     <release name="Gluster" version="2.3" short="Gluster" />

+     <arches>

+       <arch>x86_64</arch>

+     </arches>

+     <groups>

+       <group default="true">gluster</group>

+     </groups>

+   </variant>

+ 

+   <variant id="Client" name="Client" type="variant">

+     <arches>

+       <arch>i386</arch>

+       <arch>x86_64</arch>

+     </arches>

+     <groups>

+       <group default="true">core</group>

+       <group default="true">standard</group>

+       <group default="false">text-internet</group>

+       <group default="true" uservisible="false">firefox</group>

+       <group>skype</group>

+     </groups>

+     <environments>

+       <environment>minimal</environment>

+       <environment display_order="1000">desktop</environment>

+     </environments>

+   </variant>

+ 

+   <variant id="Server" name="Server" type="variant" has_optional="true">

+     <arches>

+       <arch>x86_64</arch>

+       <arch>s390x</arch>

+     </arches>

+     <groups>

+       <group default="true" uservisible="true">core</group>

+       <group default="true">standard</group>

+       <group default="true">text-internet</group>

+     </groups>

+     <environments>

+       <environment>minimal</environment>

+     </environments>

+     <variants>

+       <ref id="ResilientStorage"/>

+       <ref id="Gluster"/>

+       <variant id="optional" name="optional" type="optional">

+         <arches>

+           <arch>x86_64</arch>

+           <arch>s390x</arch>

+         </arches>

+         <groups>

+           <group default="false">firefox</group>

+         </groups>

+       </variant>

+     </variants>

+   </variant>

+ 

+ </variants>

file modified
+52 -3
@@ -1,9 +1,14 @@

  # -*- coding: utf-8 -*-

  

  import mock

- import unittest

+ import os

+ try:

+     import unittest2 as unittest

+ except ImportError:

+     import unittest

  import tempfile

  import shutil

+ import errno

  

  from pungi.util import get_arch_variant_data

  from pungi import paths
@@ -14,7 +19,11 @@

          self.topdir = tempfile.mkdtemp()

  

      def tearDown(self):

-         shutil.rmtree(self.topdir)

+         try:

+             shutil.rmtree(self.topdir)

+         except OSError as err:

+             if err.errno != errno.ENOENT:

+                 raise

  

  

  class DummyCompose(object):
@@ -22,13 +31,18 @@

          self.supported = True

          self.compose_date = '20151203'

          self.compose_type_suffix = '.t'

+         self.compose_type = 'test'

          self.compose_respin = 0

          self.compose_id = 'Test-20151203.0.t'

+         self.compose_label = None

+         self.compose_label_major_version = None

+         self.image_release = '20151203.t.0'

          self.ci_base = mock.Mock(

              release_id='Test-1.0',

              release=mock.Mock(

                  short='test',

                  version='1.0',

+                 is_layered=False,

              ),

          )

          self.topdir = topdir
@@ -43,13 +57,18 @@

              'Everything': mock.Mock(uid='Everything', arches=['x86_64', 'amd64'],

                                      type='variant', is_empty=False),

          }

+         self.log_info = mock.Mock()

          self.log_error = mock.Mock()

          self.log_debug = mock.Mock()

+         self.log_warning = mock.Mock()

          self.get_image_name = mock.Mock(return_value='image-name')

          self.image = mock.Mock(path='Client/i386/iso/image.iso')

          self.im = mock.Mock(images={'Client': {'i386': [self.image]}})

+         self.old_composes = []

+         self.config_dir = '/home/releng/config'

+         self.notifier = None

  

-     def get_variants(self, arch=None, types=None):

+     def get_variants(self, arch=None, types=None, recursive=None):

          return [v for v in self.variants.values() if not arch or arch in v.arches]

  

      def can_fail(self, variant, arch, deliverable):
@@ -61,3 +80,33 @@

          for variant in self.variants.itervalues():

              result |= set(variant.arches)

          return result

+ 

+ 

+ def touch(path, content=None):

+     """Helper utility that creates a dummy file in the given location. Directories

+     will be created."""

+     content = content or (path + '\n')

+     try:

+         os.makedirs(os.path.dirname(path))

+     except OSError:

+         pass

+     with open(path, 'w') as f:

+         f.write(content)

+ 

+ 

+ def copy_fixture(fixture_name, dest):

+     src = os.path.join(os.path.dirname(__file__), 'fixtures', fixture_name)

+     touch(dest)

+     shutil.copy2(src, dest)

+ 

+ 

+ def union(*args):

+     """Create a new dict as a union of all arguments."""

+     res = {}

+     for arg in args:

+         res.update(arg)

+     return res

+ 

+ 

+ def boom(*args, **kwargs):

+     raise Exception('BOOM')

file modified
+201 -28
@@ -2,7 +2,10 @@

  # -*- coding: utf-8 -*-

  

  

- import unittest

+ try:

+     import unittest2 as unittest

+ except ImportError:

+     import unittest

  import mock

  

  import os
@@ -10,8 +13,8 @@

  

  sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

  

- from pungi.phases.buildinstall import BuildinstallPhase, BuildinstallThread

- from tests.helpers import DummyCompose, PungiTestCase

+ from pungi.phases.buildinstall import BuildinstallPhase, BuildinstallThread, link_boot_iso

+ from tests.helpers import DummyCompose, PungiTestCase, touch, boom

  

  

  class BuildInstallCompose(DummyCompose):
@@ -57,7 +60,8 @@

              'release_short': 't',

              'release_version': '1',

              'release_is_layered': False,

-             'buildinstall_method': 'lorax'

+             'buildinstall_method': 'lorax',

+             'disc_types': {'dvd': 'DVD'},

          })

  

          get_volid.return_value = 'vol_id'
@@ -89,6 +93,11 @@

                         buildarch='amd64', is_final=True, nomacboot=True, noupgrade=True,

                         volid='vol_id', variant='Client', buildinstallpackages=[],

                         bugurl=None)])

+         self.assertItemsEqual(

+             get_volid.mock_calls,

+             [mock.call(compose, 'x86_64', variant=compose.variants['Server'], disc_type='DVD'),

+              mock.call(compose, 'amd64', variant=compose.variants['Client'], disc_type='DVD'),

+              mock.call(compose, 'amd64', variant=compose.variants['Server'], disc_type='DVD')])

  

      @mock.patch('pungi.phases.buildinstall.ThreadPool')

      @mock.patch('pungi.phases.buildinstall.LoraxWrapper')
@@ -122,6 +131,9 @@

                         volid='vol_id', variant='Client', buildinstallpackages=[],

                         bugurl=None)],

              any_order=True)

+         self.assertItemsEqual(

+             get_volid.mock_calls,

+             [mock.call(compose, 'amd64', variant=compose.variants['Client'], disc_type='dvd')])

  

      @mock.patch('pungi.phases.buildinstall.ThreadPool')

      @mock.patch('pungi.phases.buildinstall.LoraxWrapper')
@@ -133,7 +145,8 @@

              'release_short': 't',

              'release_version': '1',

              'release_is_layered': False,

-             'buildinstall_method': 'buildinstall'

+             'buildinstall_method': 'buildinstall',

+             'disc_types': {'dvd': 'DVD'},

          })

  

          get_volid.return_value = 'vol_id'
@@ -155,6 +168,10 @@

               mock.call('Test', '1', '1', self.topdir + '/work/amd64/repo',

                         self.topdir + '/work/amd64/buildinstall',

                         buildarch='amd64', is_final=True, volid='vol_id')])

+         self.assertItemsEqual(

+             get_volid.mock_calls,

+             [mock.call(compose, 'x86_64', disc_type='DVD'),

+              mock.call(compose, 'amd64', disc_type='DVD')])

  

      def test_global_upgrade_with_lorax(self):

          compose = BuildInstallCompose(self.topdir, {
@@ -169,7 +186,7 @@

              phase.validate()

  

          self.assertIn('Deprecated config option: buildinstall_upgrade_image',

-                       ctx.exception.message)

+                       str(ctx.exception))

  

      def test_lorax_options_with_buildinstall(self):

          compose = BuildInstallCompose(self.topdir, {
@@ -183,8 +200,8 @@

          with self.assertRaises(ValueError) as ctx:

              phase.validate()

  

-         self.assertIn('buildinstall', ctx.exception.message)

-         self.assertIn('lorax_options', ctx.exception.message)

+         self.assertIn('buildinstall', str(ctx.exception))

+         self.assertIn('lorax_options', str(ctx.exception))

  

      @mock.patch('pungi.phases.buildinstall.ThreadPool')

      @mock.patch('pungi.phases.buildinstall.LoraxWrapper')
@@ -237,6 +254,11 @@

                         buildarch='amd64', is_final=True, nomacboot=False, noupgrade=True,

                         volid='vol_id', variant='Client', buildinstallpackages=[],

                         bugurl=None)])

+         self.assertItemsEqual(

+             get_volid.mock_calls,

+             [mock.call(compose, 'x86_64', variant=compose.variants['Server'], disc_type='dvd'),

+              mock.call(compose, 'amd64', variant=compose.variants['Client'], disc_type='dvd'),

+              mock.call(compose, 'amd64', variant=compose.variants['Server'], disc_type='dvd')])

  

      @mock.patch('pungi.phases.buildinstall.ThreadPool')

      @mock.patch('pungi.phases.buildinstall.LoraxWrapper')
@@ -286,18 +308,23 @@

                         buildarch='amd64', is_final=True, nomacboot=True, noupgrade=False,

                         volid='vol_id', variant='Client', buildinstallpackages=[],

                         bugurl=None)])

+         self.assertItemsEqual(

+             get_volid.mock_calls,

+             [mock.call(compose, 'x86_64', variant=compose.variants['Server'], disc_type='dvd'),

+              mock.call(compose, 'amd64', variant=compose.variants['Client'], disc_type='dvd'),

+              mock.call(compose, 'amd64', variant=compose.variants['Server'], disc_type='dvd')])

  

  

  class TestCopyFiles(PungiTestCase):

  

-     @mock.patch('pungi.phases.buildinstall.symlink_boot_iso')

+     @mock.patch('pungi.phases.buildinstall.link_boot_iso')

      @mock.patch('pungi.phases.buildinstall.tweak_buildinstall')

      @mock.patch('pungi.phases.buildinstall.get_volid')

      @mock.patch('os.listdir')

      @mock.patch('os.path.isdir')

      @mock.patch('pungi.phases.buildinstall.get_kickstart_file')

      def test_copy_files_buildinstall(self, get_kickstart_file, isdir, listdir,

-                                      get_volid, tweak_buildinstall, symlink_boot_iso):

+                                      get_volid, tweak_buildinstall, link_boot_iso):

          compose = BuildInstallCompose(self.topdir, {

              'buildinstall_method': 'buildinstall'

          })
@@ -312,9 +339,9 @@

  

          self.assertItemsEqual(

              get_volid.mock_calls,

-             [mock.call(compose, 'x86_64', compose.variants['Server'], escape_spaces=False, disc_type='boot'),

-              mock.call(compose, 'amd64', compose.variants['Client'], escape_spaces=False, disc_type='boot'),

-              mock.call(compose, 'amd64', compose.variants['Server'], escape_spaces=False, disc_type='boot')])

+             [mock.call(compose, 'x86_64', compose.variants['Server'], escape_spaces=False, disc_type='dvd'),

+              mock.call(compose, 'amd64', compose.variants['Client'], escape_spaces=False, disc_type='dvd'),

+              mock.call(compose, 'amd64', compose.variants['Server'], escape_spaces=False, disc_type='dvd')])

          self.assertItemsEqual(

              tweak_buildinstall.mock_calls,

              [mock.call(self.topdir + '/work/x86_64/buildinstall',
@@ -327,19 +354,19 @@

                         self.topdir + '/compose/Client/amd64/os',

                         'amd64', 'Client', '', 'Client.amd64', 'kickstart')])

          self.assertItemsEqual(

-             symlink_boot_iso.mock_calls,

+             link_boot_iso.mock_calls,

              [mock.call(compose, 'x86_64', compose.variants['Server']),

               mock.call(compose, 'amd64', compose.variants['Client']),

               mock.call(compose, 'amd64', compose.variants['Server'])])

  

-     @mock.patch('pungi.phases.buildinstall.symlink_boot_iso')

+     @mock.patch('pungi.phases.buildinstall.link_boot_iso')

      @mock.patch('pungi.phases.buildinstall.tweak_buildinstall')

      @mock.patch('pungi.phases.buildinstall.get_volid')

      @mock.patch('os.listdir')

      @mock.patch('os.path.isdir')

      @mock.patch('pungi.phases.buildinstall.get_kickstart_file')

      def test_copy_files_lorax(self, get_kickstart_file, isdir, listdir,

-                               get_volid, tweak_buildinstall, symlink_boot_iso):

+                               get_volid, tweak_buildinstall, link_boot_iso):

          compose = BuildInstallCompose(self.topdir, {

              'buildinstall_method': 'lorax'

          })
@@ -354,9 +381,9 @@

  

          self.assertItemsEqual(

              get_volid.mock_calls,

-             [mock.call(compose, 'x86_64', compose.variants['Server'], escape_spaces=False, disc_type='boot'),

-              mock.call(compose, 'amd64', compose.variants['Client'], escape_spaces=False, disc_type='boot'),

-              mock.call(compose, 'amd64', compose.variants['Server'], escape_spaces=False, disc_type='boot')])

+             [mock.call(compose, 'x86_64', compose.variants['Server'], escape_spaces=False, disc_type='dvd'),

+              mock.call(compose, 'amd64', compose.variants['Client'], escape_spaces=False, disc_type='dvd'),

+              mock.call(compose, 'amd64', compose.variants['Server'], escape_spaces=False, disc_type='dvd')])

          self.assertItemsEqual(

              tweak_buildinstall.mock_calls,

              [mock.call(self.topdir + '/work/x86_64/buildinstall/Server',
@@ -369,11 +396,53 @@

                         self.topdir + '/compose/Client/amd64/os',

                         'amd64', 'Client', '', 'Client.amd64', 'kickstart')])

          self.assertItemsEqual(

-             symlink_boot_iso.mock_calls,

+             link_boot_iso.mock_calls,

              [mock.call(compose, 'x86_64', compose.variants['Server']),

               mock.call(compose, 'amd64', compose.variants['Client']),

               mock.call(compose, 'amd64', compose.variants['Server'])])

  

+     @mock.patch('pungi.phases.buildinstall.link_boot_iso')

+     @mock.patch('pungi.phases.buildinstall.tweak_buildinstall')

+     @mock.patch('pungi.phases.buildinstall.get_volid')

+     @mock.patch('os.listdir')

+     @mock.patch('os.path.isdir')

+     @mock.patch('pungi.phases.buildinstall.get_kickstart_file')

+     def test_copy_fail(self, get_kickstart_file, isdir, listdir,

+                        get_volid, tweak_buildinstall, link_boot_iso):

+         compose = BuildInstallCompose(self.topdir, {

+             'buildinstall_method': 'lorax',

+             'failable_deliverables': [

+                 ('^.+$', {'*': ['buildinstall']})

+             ],

+         })

+ 

+         get_volid.side_effect = (

+             lambda compose, arch, variant, escape_spaces, disc_type: "%s.%s" % (variant.uid, arch)

+         )

+         get_kickstart_file.return_value = 'kickstart'

+         tweak_buildinstall.side_effect = boom

+ 

+         phase = BuildinstallPhase(compose)

+         phase.copy_files()

+ 

+         self.assertItemsEqual(

+             get_volid.mock_calls,

+             [mock.call(compose, 'x86_64', compose.variants['Server'], escape_spaces=False, disc_type='dvd'),

+              mock.call(compose, 'amd64', compose.variants['Client'], escape_spaces=False, disc_type='dvd'),

+              mock.call(compose, 'amd64', compose.variants['Server'], escape_spaces=False, disc_type='dvd')])

+         self.assertItemsEqual(

+             tweak_buildinstall.mock_calls,

+             [mock.call(self.topdir + '/work/x86_64/buildinstall/Server',

+                        self.topdir + '/compose/Server/x86_64/os',

+                        'x86_64', 'Server', '', 'Server.x86_64', 'kickstart'),

+              mock.call(self.topdir + '/work/amd64/buildinstall/Server',

+                        self.topdir + '/compose/Server/amd64/os',

+                        'amd64', 'Server', '', 'Server.amd64', 'kickstart'),

+              mock.call(self.topdir + '/work/amd64/buildinstall/Client',

+                        self.topdir + '/compose/Client/amd64/os',

+                        'amd64', 'Client', '', 'Client.amd64', 'kickstart')])

+         self.assertItemsEqual(link_boot_iso.mock_calls, [])

+ 

  

  class BuildinstallThreadTestCase(PungiTestCase):

  
@@ -491,10 +560,10 @@

          with mock.patch('time.sleep'):

              t.process((compose, 'x86_64', None, cmd), 0)

  

-         pool.log_info.assert_has_calls([

-             mock.call('[BEGIN] Running buildinstall for arch x86_64'),

-             mock.call('[FAIL] Buildinstall for variant None arch x86_64 failed, but going on anyway.\n'

-                       'Runroot task failed: 1234. See %s/logs/x86_64/buildinstall.x86_64.log for more details.' % self.topdir)

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Buildinstall (variant None, arch x86_64) failed, but going on anyway.'),

+             mock.call('Runroot task failed: 1234. See %s/logs/x86_64/buildinstall.x86_64.log for more details.'

+                       % self.topdir)

          ])

  

      @mock.patch('pungi.phases.buildinstall.KojiWrapper')
@@ -527,12 +596,116 @@

          with mock.patch('time.sleep'):

              t.process((compose, 'x86_64', compose.variants['Server'], cmd), 0)

  

-         pool.log_info.assert_has_calls([

-             mock.call('[BEGIN] Running buildinstall for arch x86_64'),

-             mock.call('[FAIL] Buildinstall for variant Server arch x86_64 failed, but going on anyway.\n'

-                       'Runroot task failed: 1234. See %s/logs/x86_64/buildinstall-Server.x86_64.log for more details.' % self.topdir)

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Buildinstall (variant Server, arch x86_64) failed, but going on anyway.'),

+             mock.call('Runroot task failed: 1234. See %s/logs/x86_64/buildinstall-Server.x86_64.log for more details.' % self.topdir)

          ])

  

  

+ class TestSymlinkIso(PungiTestCase):

+ 

+     def setUp(self):

+         super(TestSymlinkIso, self).setUp()

+         self.compose = BuildInstallCompose(self.topdir, {})

+         os_tree = self.compose.paths.compose.os_tree('x86_64', self.compose.variants['Server'])

+         self.boot_iso_path = os.path.join(os_tree, "images", "boot.iso")

+         touch(self.boot_iso_path)

+ 

+     @mock.patch('pungi.phases.buildinstall.Image')

+     @mock.patch('pungi.phases.buildinstall.get_mtime')

+     @mock.patch('pungi.phases.buildinstall.get_file_size')

+     @mock.patch('pungi.phases.buildinstall.IsoWrapper')

+     @mock.patch('pungi.phases.buildinstall.run')

+     def test_hardlink(self, run, IsoWrapperCls, get_file_size, get_mtime, ImageCls):

+         self.compose.conf = {'buildinstall_symlink': False}

+         IsoWrapper = IsoWrapperCls.return_value

+         get_file_size.return_value = 1024

+         get_mtime.return_value = 13579

+ 

+         link_boot_iso(self.compose, 'x86_64', self.compose.variants['Server'])

+ 

+         tgt = self.topdir + '/compose/Server/x86_64/iso/image-name'

+         self.assertTrue(os.path.isfile(tgt))

+         self.assertEqual(os.stat(tgt).st_ino,

+                          os.stat(self.topdir + '/compose/Server/x86_64/os/images/boot.iso').st_ino)

+ 

+         self.assertItemsEqual(

+             self.compose.get_image_name.mock_calls,

+             [mock.call('x86_64', self.compose.variants['Server'],

+                        disc_type='boot', disc_num=None, suffix='.iso')])

+         self.assertItemsEqual(IsoWrapper.get_implanted_md5.mock_calls,

+                               [mock.call(tgt)])

+         self.assertItemsEqual(IsoWrapper.get_manifest_cmd.mock_calls,

+                               [mock.call('image-name')])

+         self.assertItemsEqual(IsoWrapper.get_volume_id.mock_calls,

+                               [mock.call(tgt)])

+         self.assertItemsEqual(run.mock_calls,

+                               [mock.call(IsoWrapper.get_manifest_cmd.return_value,

+                                          workdir=self.topdir + '/compose/Server/x86_64/iso')])

+ 

+         image = ImageCls.return_value

+         self.assertEqual(image.path, 'Server/x86_64/iso/image-name')

+         self.assertEqual(image.mtime, 13579)

+         self.assertEqual(image.size, 1024)

+         self.assertEqual(image.arch, 'x86_64')

+         self.assertEqual(image.type, "boot")

+         self.assertEqual(image.format, "iso")

+         self.assertEqual(image.disc_number, 1)

+         self.assertEqual(image.disc_count, 1)

+         self.assertEqual(image.bootable, True)

+         self.assertEqual(image.implant_md5, IsoWrapper.get_implanted_md5.return_value)

+         self.assertEqual(self.compose.im.add.mock_calls,

+                          [mock.call('Server', 'x86_64', image)])

+ 

+     @mock.patch('pungi.phases.buildinstall.Image')

+     @mock.patch('pungi.phases.buildinstall.get_mtime')

+     @mock.patch('pungi.phases.buildinstall.get_file_size')

+     @mock.patch('pungi.phases.buildinstall.IsoWrapper')

+     @mock.patch('pungi.phases.buildinstall.run')

+     def test_hardlink_with_custom_type(self, run, IsoWrapperCls, get_file_size, get_mtime, ImageCls):

+         self.compose.conf = {

+             'buildinstall_symlink': False,

+             'disc_types': {'boot': 'netinst'},

+         }

+         IsoWrapper = IsoWrapperCls.return_value

+         get_file_size.return_value = 1024

+         get_mtime.return_value = 13579

+ 

+         link_boot_iso(self.compose, 'x86_64', self.compose.variants['Server'])

+ 

+         tgt = self.topdir + '/compose/Server/x86_64/iso/image-name'

+         self.assertTrue(os.path.isfile(tgt))

+         self.assertEqual(os.stat(tgt).st_ino,

+                          os.stat(self.topdir + '/compose/Server/x86_64/os/images/boot.iso').st_ino)

+ 

+         self.assertItemsEqual(

+             self.compose.get_image_name.mock_calls,

+             [mock.call('x86_64', self.compose.variants['Server'],

+                        disc_type='netinst', disc_num=None, suffix='.iso')])

+         self.assertItemsEqual(IsoWrapper.get_implanted_md5.mock_calls,

+                               [mock.call(tgt)])

+         self.assertItemsEqual(IsoWrapper.get_manifest_cmd.mock_calls,

+                               [mock.call('image-name')])

+         self.assertItemsEqual(IsoWrapper.get_volume_id.mock_calls,

+                               [mock.call(tgt)])

+         self.assertItemsEqual(run.mock_calls,

+                               [mock.call(IsoWrapper.get_manifest_cmd.return_value,

+                                          workdir=self.topdir + '/compose/Server/x86_64/iso')])

+ 

+         image = ImageCls.return_value

+         self.assertEqual(image.path, 'Server/x86_64/iso/image-name')

+         self.assertEqual(image.mtime, 13579)

+         self.assertEqual(image.size, 1024)

+         self.assertEqual(image.arch, 'x86_64')

+         self.assertEqual(image.type, "boot")

+         self.assertEqual(image.format, "iso")

+         self.assertEqual(image.disc_number, 1)

+         self.assertEqual(image.disc_count, 1)

+         self.assertEqual(image.bootable, True)

+         self.assertEqual(image.implant_md5, IsoWrapper.get_implanted_md5.return_value)

+         self.assertEqual(self.compose.im.add.mock_calls,

+                          [mock.call('Server', 'x86_64', image)])

+ 

+ 

  if __name__ == "__main__":

      unittest.main()

file added
+181
@@ -0,0 +1,181 @@

+ #!/usr/bin/env python2

+ # -*- coding: utf-8 -*-

+ 

+ import mock

+ try:

+     import unittest2 as unittest

+ except ImportError:

+     import unittest

+ import os

+ import sys

+ import StringIO

+ 

+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

+ 

+ from pungi import checks

+ 

+ 

+ class CheckDependenciesTestCase(unittest.TestCase):

+ 

+     def dont_find(self, paths):

+         return lambda path: path not in paths

+ 

+     def test_all_deps_missing(self):

+         def custom_exists(path):

+             return False

+ 

+         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+             with mock.patch('os.path.exists') as exists:

+                 exists.side_effect = custom_exists

+                 result = checks.check({})

+ 

+         self.assertGreater(len(out.getvalue().strip().split('\n')), 1)

+         self.assertFalse(result)

+ 

+     def test_all_deps_ok(self):

+         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+             with mock.patch('platform.machine') as machine:

+                 machine.return_value = 'x86_64'

+                 with mock.patch('os.path.exists') as exists:

+                     exists.side_effect = self.dont_find([])

+                     result = checks.check({})

+ 

+         self.assertEqual('', out.getvalue())

+         self.assertTrue(result)

+ 

+     def test_does_not_require_jigdo_if_not_configured(self):

+         conf = {

+             'create_jigdo': False

+         }

+ 

+         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+             with mock.patch('platform.machine') as machine:

+                 machine.return_value = 'x86_64'

+                 with mock.patch('os.path.exists') as exists:

+                     exists.side_effect = self.dont_find(['/usr/bin/jigdo-lite'])

+                     result = checks.check(conf)

+ 

+         self.assertEqual('', out.getvalue())

+         self.assertTrue(result)

+ 

+     def test_isohybrid_not_required_without_productimg_phase(self):

+         conf = {

+             'bootable': True,

+             'productimg': False,

+             'runroot': True,

+         }

+ 

+         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+             with mock.patch('os.path.exists') as exists:

+                 exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])

+                 result = checks.check(conf)

+ 

+         self.assertEqual('', out.getvalue())

+         self.assertTrue(result)

+ 

+     def test_isohybrid_not_required_on_not_bootable(self):

+         conf = {

+             'bootable': False,

+             'runroot': True,

+         }

+ 

+         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+             with mock.patch('os.path.exists') as exists:

+                 exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])

+                 result = checks.check(conf)

+ 

+         self.assertEqual('', out.getvalue())

+         self.assertTrue(result)

+ 

+     def test_isohybrid_not_required_on_arm(self):

+         conf = {

+             'bootable': True,

+             'productimg': True,

+             'runroot': True,

+         }

+ 

+         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+             with mock.patch('platform.machine') as machine:

+                 machine.return_value = 'armhfp'

+                 with mock.patch('os.path.exists') as exists:

+                     exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])

+                     result = checks.check(conf)

+ 

+         self.assertRegexpMatches(out.getvalue(), r'^Not checking.*Expect failures.*$')

+         self.assertTrue(result)

+ 

+     def test_isohybrid_not_needed_in_runroot(self):

+         conf = {

+             'runroot': True,

+         }

+ 

+         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+             with mock.patch('os.path.exists') as exists:

+                 exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])

+                 result = checks.check(conf)

+ 

+         self.assertEqual('', out.getvalue())

+         self.assertTrue(result)

+ 

+     def test_genisoimg_not_needed_in_runroot(self):

+         conf = {

+             'runroot': True,

+         }

+ 

+         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+             with mock.patch('os.path.exists') as exists:

+                 exists.side_effect = self.dont_find(['/usr/bin/genisoimage'])

+                 result = checks.check(conf)

+ 

+         self.assertEqual('', out.getvalue())

+         self.assertTrue(result)

+ 

+     def test_genisoimg_needed_for_productimg(self):

+         conf = {

+             'runroot': True,

+             'productimg': True,

+             'bootable': True,

+         }

+ 

+         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+             with mock.patch('os.path.exists') as exists:

+                 exists.side_effect = self.dont_find(['/usr/bin/genisoimage'])

+                 result = checks.check(conf)

+ 

+         self.assertIn('genisoimage', out.getvalue())

+         self.assertFalse(result)

+ 

+ 

+ class TestUmask(unittest.TestCase):

+     def setUp(self):

+         self.orig_umask = os.umask(0)

+         os.umask(self.orig_umask)

+ 

+     def tearDown(self):

+         os.umask(self.orig_umask)

+ 

+     def test_no_warning_with_0022(self):

+         os.umask(0o022)

+         logger = mock.Mock()

+         checks.check_umask(logger)

+         self.assertItemsEqual(logger.mock_calls, [])

+ 

+     def test_no_warning_with_0000(self):

+         os.umask(0o000)

+         logger = mock.Mock()

+         checks.check_umask(logger)

+         self.assertItemsEqual(logger.mock_calls, [])

+ 

+     def test_warning_with_0044(self):

+         os.umask(0o044)

+         logger = mock.Mock()

+         checks.check_umask(logger)

+         self.assertItemsEqual(

+             logger.mock_calls,

+             [mock.call.warning('Unusually strict umask detected (0%03o), '

+                                'expect files with broken permissions.', 0o044)]

+         )

+ 

+ 

+ if __name__ == "__main__":

+     unittest.main()

file modified
+214 -1
@@ -2,7 +2,10 @@

  # -*- coding: utf-8 -*-

  

  import mock

- import unittest

+ try:

+     import unittest2 as unittest

+ except ImportError:

+     import unittest

  import os

  import sys

  import tempfile
@@ -13,6 +16,12 @@

  from pungi.compose import Compose

  

  

+ class ConfigWrapper(dict):

+     def __init__(self, *args, **kwargs):

+         super(ConfigWrapper, self).__init__(*args, **kwargs)

+         self._open_file = '%s/fixtures/config.conf' % os.path.abspath(os.path.dirname(__file__))

+ 

+ 

  class ComposeTestCase(unittest.TestCase):

      def setUp(self):

          self.tmp_dir = tempfile.mkdtemp()
@@ -71,6 +80,210 @@

                                           'RC-1.0', '1', 'rel_short', '2', '.iso', 'nightly',

                                           '.n', 'Server', '3.0']))

  

+     @mock.patch('pungi.compose.ComposeInfo')

+     def test_get_image_name_type_netinst(self, ci):

+         conf = {}

+         variant = mock.Mock(uid='Server', type='variant')

+         ci.return_value.compose.respin = 2

+         ci.return_value.compose.id = 'compose_id'

+         ci.return_value.compose.date = '20160107'

+         ci.return_value.compose.type = 'nightly'

+         ci.return_value.compose.type_suffix = '.n'

+         ci.return_value.compose.label = 'RC-1.0'

+         ci.return_value.compose.label_major_version = '1'

+ 

+         ci.return_value.release.version = '3.0'

+         ci.return_value.release.short = 'rel_short'

+ 

+         compose = Compose(conf, self.tmp_dir)

+ 

+         keys = ['arch', 'compose_id', 'date', 'disc_num', 'disc_type',

+                 'label', 'label_major_version', 'release_short', 'respin',

+                 'suffix', 'type', 'type_suffix', 'variant', 'version']

+         format = '-'.join(['%(' + k + ')s' for k in keys])

+         name = compose.get_image_name('x86_64', variant, format=format,

+                                       disc_num=7, disc_type='netinst', suffix='.iso')

+ 

+         self.assertEqual(name, '-'.join(['x86_64', 'compose_id', '20160107', '7', 'netinst',

+                                          'RC-1.0', '1', 'rel_short', '2', '.iso', 'nightly',

+                                          '.n', 'Server', '3.0']))

+ 

+     @mock.patch('pungi.compose.ComposeInfo')

+     def test_image_release(self, ci):

+         conf = {}

+         ci.return_value.compose.respin = 2

+         ci.return_value.compose.date = '20160107'

+         ci.return_value.compose.type = 'nightly'

+         ci.return_value.compose.type_suffix = '.n'

+ 

+         compose = Compose(conf, self.tmp_dir)

+ 

+         self.assertEqual(compose.image_release, '20160107.n.2')

+ 

+     @mock.patch('pungi.compose.ComposeInfo')

+     def test_image_release_production(self, ci):

+         conf = {}

+         ci.return_value.compose.respin = 2

+         ci.return_value.compose.date = '20160107'

+         ci.return_value.compose.type = 'production'

+         ci.return_value.compose.type_suffix = '.n'

+ 

+         compose = Compose(conf, self.tmp_dir)

+ 

+         self.assertEqual(compose.image_release, '20160107.n.2')

+ 

+     @mock.patch('pungi.compose.ComposeInfo')

+     def test_get_variant_arches_without_filter(self, ci):

+         conf = ConfigWrapper(

+             variants_file={'scm': 'file',

+                            'repo': None,

+                            'file': 'variants.xml'},

+             release_name='Test',

+             release_version='1.0',

+             release_short='test',

+         )

+ 

+         compose = Compose(conf, self.tmp_dir)

+         compose.read_variants()

+ 

+         self.assertEqual(sorted([v.uid for v in compose.variants.itervalues()]),

+                          ['Client', 'Crashy', 'Live', 'Server'])

+         self.assertEqual(sorted([v.uid for v in compose.variants['Server'].variants.itervalues()]),

+                          ['Server-Gluster', 'Server-ResilientStorage', 'Server-optional'])

+         self.assertItemsEqual(compose.variants['Client'].arches,

+                               ['i386', 'x86_64'])

+         self.assertItemsEqual(compose.variants['Crashy'].arches,

+                               ['ppc64le'])

+         self.assertItemsEqual(compose.variants['Live'].arches,

+                               ['x86_64'])

+         self.assertItemsEqual(compose.variants['Server'].arches,

+                               ['s390x', 'x86_64'])

+         self.assertItemsEqual(compose.variants['Server'].variants['Gluster'].arches,

+                               ['x86_64'])

+         self.assertItemsEqual(compose.variants['Server'].variants['ResilientStorage'].arches,

+                               ['x86_64'])

+         self.assertItemsEqual(compose.variants['Server'].variants['optional'].arches,

+                               ['s390x', 'x86_64'])

+ 

+         self.assertEqual([v.uid for v in compose.get_variants()],

+                          ['Client', 'Crashy', 'Live', 'Server', 'Server-Gluster',

+                           'Server-ResilientStorage', 'Server-optional'])

+         self.assertEqual(compose.get_arches(), ['i386', 'ppc64le', 's390x', 'x86_64'])

+ 

+     @mock.patch('pungi.compose.ComposeInfo')

+     def test_get_variant_arches_with_arch_filter(self, ci):

+         conf = ConfigWrapper(

+             variants_file={'scm': 'file',

+                            'repo': None,

+                            'file': 'variants.xml'},

+             release_name='Test',

+             release_version='1.0',

+             release_short='test',

+             tree_arches=['x86_64'],

+         )

+ 

+         compose = Compose(conf, self.tmp_dir)

+         compose.read_variants()

+ 

+         self.assertEqual(sorted([v.uid for v in compose.variants.itervalues()]),

+                          ['Client', 'Live', 'Server'])

+         self.assertEqual(sorted([v.uid for v in compose.variants['Server'].variants.itervalues()]),

+                          ['Server-Gluster', 'Server-ResilientStorage', 'Server-optional'])

+         self.assertItemsEqual(compose.variants['Client'].arches,

+                               ['x86_64'])

+         self.assertItemsEqual(compose.variants['Live'].arches,

+                               ['x86_64'])

+         self.assertItemsEqual(compose.variants['Server'].arches,

+                               ['x86_64'])

+         self.assertItemsEqual(compose.variants['Server'].variants['Gluster'].arches,

+                               ['x86_64'])

+         self.assertItemsEqual(compose.variants['Server'].variants['ResilientStorage'].arches,

+                               ['x86_64'])

+         self.assertItemsEqual(compose.variants['Server'].variants['optional'].arches,

+                               ['x86_64'])

+ 

+         self.assertEqual(compose.get_arches(), ['x86_64'])

+         self.assertEqual([v.uid for v in compose.get_variants()],

+                          ['Client', 'Live', 'Server', 'Server-Gluster',

+                           'Server-ResilientStorage', 'Server-optional'])

+ 

+     @mock.patch('pungi.compose.ComposeInfo')

+     def test_get_variant_arches_with_variant_filter(self, ci):

+         ci.return_value.compose.respin = 2

+         ci.return_value.compose.date = '20160107'

+         ci.return_value.compose.type = 'production'

+         ci.return_value.compose.type_suffix = '.n'

+ 

+         conf = ConfigWrapper(

+             variants_file={'scm': 'file',

+                            'repo': None,

+                            'file': 'variants.xml'},

+             release_name='Test',

+             release_version='1.0',

+             release_short='test',

+             tree_variants=['Server', 'Client', 'Server-Gluster'],

+         )

+ 

+         compose = Compose(conf, self.tmp_dir)

+         compose.read_variants()

+ 

+         self.assertEqual(sorted([v.uid for v in compose.variants.itervalues()]),

+                          ['Client', 'Server'])

+         self.assertItemsEqual(compose.variants['Client'].arches,

+                               ['i386', 'x86_64'])

+         self.assertItemsEqual(compose.variants['Server'].arches,

+                               ['s390x', 'x86_64'])

+         self.assertItemsEqual(compose.variants['Server'].variants['Gluster'].arches,

+                               ['x86_64'])

+ 

+         self.assertEqual(compose.get_arches(), ['i386', 's390x', 'x86_64'])

+         self.assertEqual([v.uid for v in compose.get_variants()],

+                          ['Client', 'Server', 'Server-Gluster'])

+ 

+     @mock.patch('pungi.compose.ComposeInfo')

+     def test_get_variant_arches_with_both_filters(self, ci):

+         ci.return_value.compose.respin = 2

+         ci.return_value.compose.date = '20160107'

+         ci.return_value.compose.type = 'production'

+         ci.return_value.compose.type_suffix = '.n'

+ 

+         logger = mock.Mock()

+ 

+         conf = ConfigWrapper(

+             variants_file={'scm': 'file',

+                            'repo': None,

+                            'file': 'variants.xml'},

+             release_name='Test',

+             release_version='1.0',

+             release_short='test',

+             tree_variants=['Server', 'Client', 'Server-optional'],

+             tree_arches=['x86_64'],

+         )

+ 

+         compose = Compose(conf, self.tmp_dir, logger=logger)

+         compose.read_variants()

+ 

+         self.assertEqual(sorted([v.uid for v in compose.variants.itervalues()]),

+                          ['Client', 'Server'])

+         self.assertItemsEqual(compose.variants['Client'].arches,

+                               ['x86_64'])

+         self.assertItemsEqual(compose.variants['Server'].arches,

+                               ['x86_64'])

+         self.assertItemsEqual(compose.variants['Server'].variants['optional'].arches,

+                               ['x86_64'])

+ 

+         self.assertEqual(compose.get_arches(), ['x86_64'])

+         self.assertEqual([v.uid for v in compose.get_variants()],

+                          ['Client', 'Server', 'Server-optional'])

+ 

+         self.assertItemsEqual(

+             logger.info.call_args_list,

+             [mock.call('Excluding variant Live: filtered by configuration.'),

+              mock.call('Excluding variant Crashy: all its arches are filtered.'),

+              mock.call('Excluding variant Server-ResilientStorage: filtered by configuration.'),

+              mock.call('Excluding variant Server-Gluster: filtered by configuration.')]

+         )

+ 

  

  class StatusTest(unittest.TestCase):

      def setUp(self):

@@ -0,0 +1,491 @@

+ #!/usr/bin/env python

+ # -*- coding: utf-8 -*-

+ 

+ 

+ try:

+     import unittest2 as unittest

+ except ImportError:

+     import unittest

+ import mock

+ 

+ import os

+ import sys

+ 

+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

+ 

+ from tests import helpers

+ from pungi.phases import createiso

+ 

+ 

+ class CreateisoPhaseTest(helpers.PungiTestCase):

+ 

+     @mock.patch('pungi.phases.createiso.ThreadPool')

+     def test_skip_all(self, ThreadPool):

+         compose = helpers.DummyCompose(self.topdir, {

+             'createiso_skip': [

+                 ('^.*$', {'*': True, 'src': True})

+             ]

+         })

+ 

+         pool = ThreadPool.return_value

+ 

+         phase = createiso.CreateisoPhase(compose)

+         phase.run()

+ 

+         self.assertEqual(len(pool.add.call_args_list), 0)

+         self.assertEqual(pool.queue_put.call_args_list, [])

+ 

+     @mock.patch('pungi.phases.createiso.ThreadPool')

+     def test_nothing_happens_without_rpms(self, ThreadPool):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_short': 'test',

+             'release_version': '1.0',

+             'release_is_layered': False,

+             'createiso_skip': [

+             ]

+         })

+ 

+         pool = ThreadPool.return_value

+ 

+         phase = createiso.CreateisoPhase(compose)

+         phase.run()

+ 

+         self.assertEqual(len(pool.add.call_args_list), 0)

+         self.assertEqual(pool.queue_put.call_args_list, [])

+         self.assertItemsEqual(

+             compose.log_warning.call_args_list,

+             [mock.call('No RPMs found for Everything.x86_64, skipping ISO'),

+              mock.call('No RPMs found for Everything.amd64, skipping ISO'),

+              mock.call('No RPMs found for Everything.src, skipping ISO'),

+              mock.call('No RPMs found for Client.amd64, skipping ISO'),

+              mock.call('No RPMs found for Client.src, skipping ISO'),

+              mock.call('No RPMs found for Server.x86_64, skipping ISO'),

+              mock.call('No RPMs found for Server.amd64, skipping ISO'),

+              mock.call('No RPMs found for Server.src, skipping ISO')]

+         )

+ 

+     @mock.patch('pungi.phases.createiso.prepare_iso')

+     @mock.patch('pungi.phases.createiso.split_iso')

+     @mock.patch('pungi.phases.createiso.ThreadPool')

+     def test_start_one_worker(self, ThreadPool, split_iso, prepare_iso):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_short': 'test',

+             'release_version': '1.0',

+             'release_is_layered': False,

+             'createiso_skip': [

+             ]

+         })

+         helpers.touch(os.path.join(

+             compose.paths.compose.os_tree('x86_64', compose.variants['Server']),

+             'dummy.rpm'))

+         disc_data = mock.Mock()

+         split_iso.return_value = [disc_data]

+         prepare_iso.return_value = 'dummy-graft-points'

+ 

+         pool = ThreadPool.return_value

+ 

+         phase = createiso.CreateisoPhase(compose)

+         phase.run()

+ 

+         self.assertEqual(prepare_iso.call_args_list,

+                          [mock.call(compose, 'x86_64', compose.variants['Server'],

+                                     disc_count=1, disc_num=1, split_iso_data=disc_data)])

+         self.assertEqual(split_iso.call_args_list,

+                          [mock.call(compose, 'x86_64', compose.variants['Server'])])

+         self.assertEqual(len(pool.add.call_args_list), 1)

+         self.maxDiff = None

+         self.assertItemsEqual(

+             pool.queue_put.call_args_list,

+             [mock.call((

+                 compose,

+                 {

+                     'iso_path': '%s/compose/Server/x86_64/iso/image-name' % self.topdir,

+                     'bootable': False,

+                     'cmd': ['pungi-createiso',

+                             '--output-dir=%s/compose/Server/x86_64/iso' % self.topdir,

+                             '--iso-name=image-name', '--volid=test-1.0 Server.x86_64',

+                             '--graft-points=dummy-graft-points',

+                             '--arch=x86_64', '--supported',

+                             '--jigdo-dir=%s/compose/Server/x86_64/jigdo' % self.topdir,

+                             '--os-tree=%s/compose/Server/x86_64/os' % self.topdir],

+                     'label': '',

+                     'disc_num': 1,

+                     'disc_count': 1,

+                 },

+                 compose.variants['Server'],

+                 'x86_64'

+             ))]

+         )

+ 

+     @mock.patch('pungi.phases.createiso.prepare_iso')

+     @mock.patch('pungi.phases.createiso.split_iso')

+     @mock.patch('pungi.phases.createiso.ThreadPool')

+     def test_bootable(self, ThreadPool, split_iso, prepare_iso):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_short': 'test',

+             'release_version': '1.0',

+             'release_is_layered': False,

+             'buildinstall_method': 'lorax',

+             'bootable': True,

+             'createiso_skip': [

+             ]

+         })

+         helpers.touch(os.path.join(

+             compose.paths.compose.os_tree('x86_64', compose.variants['Server']),

+             'dummy.rpm'))

+         helpers.touch(os.path.join(

+             compose.paths.compose.os_tree('src', compose.variants['Server']),

+             'dummy.rpm'))

+         disc_data = mock.Mock()

+         split_iso.return_value = [disc_data]

+         prepare_iso.return_value = 'dummy-graft-points'

+ 

+         pool = ThreadPool.return_value

+ 

+         phase = createiso.CreateisoPhase(compose)

+         phase.run()

+ 

+         self.assertItemsEqual(

+             prepare_iso.call_args_list,

+             [mock.call(compose, 'x86_64', compose.variants['Server'],

+                        disc_count=1, disc_num=1, split_iso_data=disc_data),

+              mock.call(compose, 'src', compose.variants['Server'],

+                        disc_count=1, disc_num=1, split_iso_data=disc_data)])

+         self.assertItemsEqual(

+             split_iso.call_args_list,

+             [mock.call(compose, 'x86_64', compose.variants['Server']),

+              mock.call(compose, 'src', compose.variants['Server'])])

+         self.assertEqual(len(pool.add.call_args_list), 2)

+         self.maxDiff = None

+         self.assertItemsEqual(

+             pool.queue_put.call_args_list,

+             [mock.call((compose,

+                         {'iso_path': '%s/compose/Server/x86_64/iso/image-name' % self.topdir,

+                          'bootable': True,

+                          'cmd': ['pungi-createiso',

+                                  '--output-dir=%s/compose/Server/x86_64/iso' % self.topdir,

+                                  '--iso-name=image-name', '--volid=test-1.0 Server.x86_64',

+                                  '--graft-points=dummy-graft-points',

+                                  '--arch=x86_64',

+                                  '--buildinstall-method=lorax',

+                                  '--supported',

+                                  '--jigdo-dir=%s/compose/Server/x86_64/jigdo' % self.topdir,

+                                  '--os-tree=%s/compose/Server/x86_64/os' % self.topdir],

+                          'label': '',

+                          'disc_num': 1,

+                          'disc_count': 1},

+                         compose.variants['Server'],

+                         'x86_64')),

+              mock.call((compose,

+                         {'iso_path': '%s/compose/Server/source/iso/image-name' % self.topdir,

+                          'bootable': False,

+                          'cmd': ['pungi-createiso',

+                                  '--output-dir=%s/compose/Server/source/iso' % self.topdir,

+                                  '--iso-name=image-name', '--volid=test-1.0 Server.src',

+                                  '--graft-points=dummy-graft-points',

+                                  '--arch=src', '--supported',

+                                  '--jigdo-dir=%s/compose/Server/source/jigdo' % self.topdir,

+                                  '--os-tree=%s/compose/Server/source/tree' % self.topdir],

+                          'label': '',

+                          'disc_num': 1,

+                          'disc_count': 1},

+                         compose.variants['Server'],

+                         'src'))]

+         )

+ 

+ 

+ class CreateisoThreadTest(helpers.PungiTestCase):

+ 

+     @mock.patch('pungi.phases.createiso.IsoWrapper')

+     @mock.patch('pungi.phases.createiso.get_mtime')

+     @mock.patch('pungi.phases.createiso.get_file_size')

+     @mock.patch('pungi.phases.createiso.KojiWrapper')

+     def test_process_in_runroot(self, KojiWrapper, get_file_size, get_mtime, IsoWrapper):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_short': 'test',

+             'release_version': '1.0',

+             'release_is_layered': False,

+             'runroot': True,

+             'runroot_tag': 'f25-build',

+             'koji_profile': 'koji',

+         })

+         cmd = {

+             'iso_path': '%s/compose/Server/x86_64/iso/image-name' % self.topdir,

+             'bootable': False,

+             'cmd': mock.Mock(),

+             'label': '',

+             'disc_num': 1,

+             'disc_count': 1,

+         }

+         get_file_size.return_value = 1024

+         get_mtime.return_value = 13579

+         getTag = KojiWrapper.return_value.koji_proxy.getTag

+         getTag.return_value = {'arches': 'x86_64'}

+         get_runroot_cmd = KojiWrapper.return_value.get_runroot_cmd

+         run_runroot = KojiWrapper.return_value.run_runroot_cmd

+         run_runroot.return_value = {

+             'retcode': 0,

+             'output': 'whatever',

+             'task_id': 1234,

+         }

+ 

+         t = createiso.CreateIsoThread(mock.Mock())

+         with mock.patch('time.sleep'):

+             t.process((compose, cmd, compose.variants['Server'], 'x86_64'), 1)

+ 

+         self.assertEqual(getTag.call_args_list, [mock.call('f25-build')])

+         self.assertEqual(get_runroot_cmd.call_args_list,

+                          [mock.call('f25-build', 'x86_64', cmd['cmd'], channel=None,

+                                     mounts=[self.topdir],

+                                     packages=['coreutils', 'genisoimage', 'isomd5sum',

+                                               'jigdo', 'pungi'],

+                                     task_id=True, use_shell=True)])

+         self.assertEqual(

+             run_runroot.call_args_list,

+             [mock.call(get_runroot_cmd.return_value,

+                        log_file='%s/logs/x86_64/createiso-image-name.x86_64.log' % self.topdir)])

+         self.assertEqual(IsoWrapper.return_value.get_implanted_md5.call_args_list,

+                          [mock.call(cmd['iso_path'])])

+         self.assertEqual(IsoWrapper.return_value.get_volume_id.call_args_list,

+                          [mock.call(cmd['iso_path'])])

+ 

+         self.assertEqual(len(compose.im.add.call_args_list), 1)

+         args, _ = compose.im.add.call_args_list[0]

+         self.assertEqual(args[0], 'Server')

+         self.assertEqual(args[1], 'x86_64')

+         image = args[2]

+         self.assertEqual(image.arch, 'x86_64')

+         self.assertEqual(image.path, 'Server/x86_64/iso/image-name')

+         self.assertEqual(image.format, 'iso')

+         self.assertEqual(image.type, 'dvd')

+         self.assertEqual(image.subvariant, 'Server')

+ 

+     @mock.patch('pungi.phases.createiso.IsoWrapper')

+     @mock.patch('pungi.phases.createiso.get_mtime')

+     @mock.patch('pungi.phases.createiso.get_file_size')

+     @mock.patch('pungi.phases.createiso.KojiWrapper')

+     def test_process_bootable(self, KojiWrapper, get_file_size, get_mtime, IsoWrapper):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_short': 'test',

+             'release_version': '1.0',

+             'release_is_layered': False,

+             'runroot': True,

+             'bootable': True,

+             'buildinstall_method': 'lorax',

+             'runroot_tag': 'f25-build',

+             'koji_profile': 'koji',

+         })

+         cmd = {

+             'iso_path': '%s/compose/Server/x86_64/iso/image-name' % self.topdir,

+             'bootable': True,

+             'cmd': mock.Mock(),

+             'label': '',

+             'disc_num': 1,

+             'disc_count': 1,

+         }

+         get_file_size.return_value = 1024

+         get_mtime.return_value = 13579

+         getTag = KojiWrapper.return_value.koji_proxy.getTag

+         getTag.return_value = {'arches': 'x86_64'}

+         get_runroot_cmd = KojiWrapper.return_value.get_runroot_cmd

+         run_runroot = KojiWrapper.return_value.run_runroot_cmd

+         run_runroot.return_value = {

+             'retcode': 0,

+             'output': 'whatever',

+             'task_id': 1234,

+         }

+ 

+         t = createiso.CreateIsoThread(mock.Mock())

+         with mock.patch('time.sleep'):

+             t.process((compose, cmd, compose.variants['Server'], 'x86_64'), 1)

+ 

+         self.assertEqual(getTag.call_args_list, [mock.call('f25-build')])

+         self.assertEqual(get_runroot_cmd.call_args_list,

+                          [mock.call('f25-build', 'x86_64', cmd['cmd'], channel=None,

+                                     mounts=[self.topdir],

+                                     packages=['coreutils', 'genisoimage', 'isomd5sum',

+                                               'jigdo', 'pungi', 'lorax'],

+                                     task_id=True, use_shell=True)])

+         self.assertEqual(

+             run_runroot.call_args_list,

+             [mock.call(get_runroot_cmd.return_value,

+                        log_file='%s/logs/x86_64/createiso-image-name.x86_64.log' % self.topdir)])

+         self.assertEqual(IsoWrapper.return_value.get_implanted_md5.call_args_list,

+                          [mock.call(cmd['iso_path'])])

+         self.assertEqual(IsoWrapper.return_value.get_volume_id.call_args_list,

+                          [mock.call(cmd['iso_path'])])

+ 

+         self.assertEqual(len(compose.im.add.call_args_list), 1)

+         args, _ = compose.im.add.call_args_list[0]

+         self.assertEqual(args[0], 'Server')

+         self.assertEqual(args[1], 'x86_64')

+         image = args[2]

+         self.assertEqual(image.arch, 'x86_64')

+         self.assertEqual(image.path, 'Server/x86_64/iso/image-name')

+         self.assertEqual(image.format, 'iso')

+         self.assertEqual(image.type, 'dvd')

+         self.assertEqual(image.subvariant, 'Server')

+ 

+     @mock.patch('pungi.phases.createiso.IsoWrapper')

+     @mock.patch('pungi.phases.createiso.get_mtime')

+     @mock.patch('pungi.phases.createiso.get_file_size')

+     @mock.patch('pungi.phases.createiso.KojiWrapper')

+     def test_process_in_runroot_crash(self, KojiWrapper, get_file_size, get_mtime, IsoWrapper):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_short': 'test',

+             'release_version': '1.0',

+             'release_is_layered': False,

+             'runroot': True,

+             'runroot_tag': 'f25-build',

+             'koji_profile': 'koji',

+             'failable_deliverables': [

+                 ('^.*$', {'*': 'iso'})

+             ]

+         })

+         cmd = {

+             'iso_path': '%s/compose/Server/x86_64/iso/image-name' % self.topdir,

+             'bootable': False,

+             'cmd': mock.Mock(),

+             'label': '',

+             'disc_num': 1,

+             'disc_count': 1,

+         }

+         getTag = KojiWrapper.return_value.koji_proxy.getTag

+         getTag.return_value = {'arches': 'x86_64'}

+         run_runroot = KojiWrapper.return_value.run_runroot_cmd

+         run_runroot.side_effect = helpers.boom

+ 

+         t = createiso.CreateIsoThread(mock.Mock())

+         with mock.patch('time.sleep'):

+             t.process((compose, cmd, compose.variants['Server'], 'x86_64'), 1)

+ 

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Creating ISO (variant Server, arch x86_64) failed, but going on anyway.'),

+             mock.call('BOOM')

+         ])

+ 

+     @mock.patch('pungi.phases.createiso.IsoWrapper')

+     @mock.patch('pungi.phases.createiso.get_mtime')

+     @mock.patch('pungi.phases.createiso.get_file_size')

+     @mock.patch('pungi.phases.createiso.KojiWrapper')

+     def test_process_in_runroot_fail(self, KojiWrapper, get_file_size, get_mtime, IsoWrapper):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_short': 'test',

+             'release_version': '1.0',

+             'release_is_layered': False,

+             'runroot': True,

+             'runroot_tag': 'f25-build',

+             'koji_profile': 'koji',

+             'failable_deliverables': [

+                 ('^.*$', {'*': 'iso'})

+             ]

+         })

+         cmd = {

+             'iso_path': '%s/compose/Server/x86_64/iso/image-name' % self.topdir,

+             'bootable': False,

+             'cmd': mock.Mock(),

+             'label': '',

+             'disc_num': 1,

+             'disc_count': 1,

+         }

+         getTag = KojiWrapper.return_value.koji_proxy.getTag

+         getTag.return_value = {'arches': 'x86_64'}

+         run_runroot = KojiWrapper.return_value.run_runroot_cmd

+         run_runroot.return_value = {

+             'retcode': 1,

+             'output': 'Nope',

+             'task_id': '1234',

+         }

+ 

+         t = createiso.CreateIsoThread(mock.Mock())

+         with mock.patch('time.sleep'):

+             t.process((compose, cmd, compose.variants['Server'], 'x86_64'), 1)

+ 

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Creating ISO (variant Server, arch x86_64) failed, but going on anyway.'),

+             mock.call('Runroot task failed: 1234. See %s for more details.'

+                       % (self.topdir + '/logs/x86_64/createiso-image-name.x86_64.log'))

+         ])

+ 

+     @mock.patch('pungi.phases.createiso.IsoWrapper')

+     @mock.patch('pungi.phases.createiso.get_mtime')

+     @mock.patch('pungi.phases.createiso.get_file_size')

+     @mock.patch('pungi.phases.createiso.run')

+     @mock.patch('pungi.phases.createiso.KojiWrapper')

+     def test_process_locally(self, KojiWrapper, run, get_file_size, get_mtime, IsoWrapper):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_short': 'test',

+             'release_version': '1.0',

+             'release_is_layered': False,

+             'runroot': False,

+         })

+         cmd = {

+             'iso_path': '%s/compose/Server/x86_64/iso/image-name' % self.topdir,

+             'bootable': False,

+             'cmd': mock.Mock(),

+             'label': '',

+             'disc_num': 1,

+             'disc_count': 1,

+         }

+         get_file_size.return_value = 1024

+         get_mtime.return_value = 13579

+ 

+         t = createiso.CreateIsoThread(mock.Mock())

+         with mock.patch('time.sleep'):

+             t.process((compose, cmd, compose.variants['Server'], 'x86_64'), 1)

+ 

+         self.assertEqual(KojiWrapper.return_value.mock_calls, [])

+         self.assertEqual(

+             run.call_args_list,

+             [mock.call(cmd['cmd'], show_cmd=True,

+                        logfile='%s/logs/x86_64/createiso-image-name.x86_64.log' % self.topdir)])

+         self.assertEqual(IsoWrapper.return_value.get_implanted_md5.call_args_list,

+                          [mock.call(cmd['iso_path'])])

+         self.assertEqual(IsoWrapper.return_value.get_volume_id.call_args_list,

+                          [mock.call(cmd['iso_path'])])

+ 

+         self.assertEqual(len(compose.im.add.call_args_list), 1)

+         args, _ = compose.im.add.call_args_list[0]

+         self.assertEqual(args[0], 'Server')

+         self.assertEqual(args[1], 'x86_64')

+         image = args[2]

+         self.assertEqual(image.arch, 'x86_64')

+         self.assertEqual(image.path, 'Server/x86_64/iso/image-name')

+         self.assertEqual(image.format, 'iso')

+         self.assertEqual(image.type, 'dvd')

+         self.assertEqual(image.subvariant, 'Server')

+ 

+     @mock.patch('pungi.phases.createiso.run')

+     @mock.patch('pungi.phases.createiso.KojiWrapper')

+     def test_process_locally_crash(self, KojiWrapper, run):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_short': 'test',

+             'release_version': '1.0',

+             'release_is_layered': False,

+             'runroot': False,

+             'failable_deliverables': [

+                 ('^.*$', {'*': 'iso'})

+             ]

+         })

+         cmd = {

+             'iso_path': '%s/compose/Server/x86_64/iso/image-name' % self.topdir,

+             'bootable': False,

+             'cmd': mock.Mock(),

+             'label': '',

+             'disc_num': 1,

+             'disc_count': 1,

+         }

+         run.side_effect = helpers.boom

+ 

+         t = createiso.CreateIsoThread(mock.Mock())

+         with mock.patch('time.sleep'):

+             t.process((compose, cmd, compose.variants['Server'], 'x86_64'), 1)

+ 

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Creating ISO (variant Server, arch x86_64) failed, but going on anyway.'),

+             mock.call('BOOM')

+         ])

+ 

+ 

+ if __name__ == '__main__':

+     unittest.main()

@@ -0,0 +1,240 @@

+ #!/usr/bin/env python

+ # -*- coding: utf-8 -*-

+ 

+ import unittest

+ import mock

+ 

+ import os

+ import sys

+ 

+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

+ 

+ from tests import helpers

+ from pungi import createiso

+ 

+ 

class CreateIsoScriptTest(helpers.PungiTestCase):
    """Tests for the ``pungi.createiso`` command line script.

    ``createiso.main`` is invoked with different argument combinations
    and the external commands it would execute are captured by mocking
    ``kobo.shortcuts.run``.
    """

    def assertEqualCalls(self, actual, expected):
        # Pairwise comparison of two call lists; a length mismatch is
        # reported first.  NOTE(review): not used by any test below.
        self.assertEqual(len(actual), len(expected))
        for x, y in zip(actual, expected):
            self.assertEqual(x, y)

    def setUp(self):
        super(CreateIsoScriptTest, self).setUp()
        # All ISOs are written into a scratch directory under topdir.
        self.outdir = os.path.join(self.topdir, 'isos')

    @mock.patch('kobo.shortcuts.run')
    def test_minimal_run(self, run):
        """Non-bootable ISO: genisoimage, md5 implant, manifest dump."""
        createiso.main([
            '--output-dir=%s' % self.outdir,
            '--iso-name=DP-1.0-20160405.t.3-x86_64.iso',
            '--volid=DP-1.0-20160405.t.3',
            '--graft-points=graft-list',
            '--arch=x86_64',
        ])
        self.maxDiff = None
        self.assertEqual(
            run.call_args_list,
            [mock.call(['/usr/bin/genisoimage', '-untranslated-filenames',
                        '-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
                        '-rational-rock', '-translation-table',
                        '-input-charset', 'utf-8', '-x', './lost+found',
                        '-o', 'DP-1.0-20160405.t.3-x86_64.iso',
                        '-graft-points', '-path-list', 'graft-list'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-x86_64.iso'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call('isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest',
                       show_cmd=True, stdout=True, workdir=self.outdir)]
        )

    @mock.patch('kobo.shortcuts.run')
    def test_bootable_run(self, run):
        """x86_64 bootable ISO adds El Torito options and isohybrid --uefi."""
        # Mocked run returns the lorax templates location when queried.
        run.return_value = (0, '/usr/share/lorax')

        createiso.main([
            '--output-dir=%s' % self.outdir,
            '--iso-name=DP-1.0-20160405.t.3-x86_64.iso',
            '--volid=DP-1.0-20160405.t.3',
            '--graft-points=graft-list',
            '--arch=x86_64',
            '--buildinstall-method=lorax',
        ])

        self.maxDiff = None
        self.assertItemsEqual(
            run.call_args_list,
            [mock.call(['/usr/bin/genisoimage', '-untranslated-filenames',
                        '-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
                        '-rational-rock', '-translation-table',
                        '-input-charset', 'utf-8', '-x', './lost+found',
                        '-b', 'isolinux/isolinux.bin', '-c', 'isolinux/boot.cat',
                        '-no-emul-boot',
                        '-boot-load-size', '4', '-boot-info-table',
                        '-eltorito-alt-boot', '-e', 'images/efiboot.img',
                        '-no-emul-boot',
                        '-o', 'DP-1.0-20160405.t.3-x86_64.iso',
                        '-graft-points', '-path-list', 'graft-list'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call(['pungi-pylorax-find-templates', '/usr/share/lorax'],
                       show_cmd=True, stdout=True),
             mock.call(['/usr/bin/isohybrid', '--uefi', 'DP-1.0-20160405.t.3-x86_64.iso'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-x86_64.iso'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call('isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest',
                       show_cmd=True, stdout=True, workdir=self.outdir)]
        )

    @mock.patch('kobo.shortcuts.run')
    def test_bootable_run_on_i386(self, run):
        """i386 bootable ISO: no EFI boot image, plain isohybrid."""
        # This will call isohybrid, but not with --uefi switch
        run.return_value = (0, '/usr/share/lorax')

        createiso.main([
            '--output-dir=%s' % self.outdir,
            '--iso-name=DP-1.0-20160405.t.3-i386.iso',
            '--volid=DP-1.0-20160405.t.3',
            '--graft-points=graft-list',
            '--arch=i386',
            '--buildinstall-method=lorax',
        ])

        self.maxDiff = None
        self.assertItemsEqual(
            run.call_args_list,
            [mock.call(['/usr/bin/genisoimage', '-untranslated-filenames',
                        '-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
                        '-rational-rock', '-translation-table',
                        '-input-charset', 'utf-8', '-x', './lost+found',
                        '-b', 'isolinux/isolinux.bin', '-c', 'isolinux/boot.cat',
                        '-no-emul-boot',
                        '-boot-load-size', '4', '-boot-info-table',
                        '-o', 'DP-1.0-20160405.t.3-i386.iso',
                        '-graft-points', '-path-list', 'graft-list'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call(['pungi-pylorax-find-templates', '/usr/share/lorax'],
                       show_cmd=True, stdout=True),
             mock.call(['/usr/bin/isohybrid', 'DP-1.0-20160405.t.3-i386.iso'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-i386.iso'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call('isoinfo -R -f -i DP-1.0-20160405.t.3-i386.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-i386.iso.manifest',
                       show_cmd=True, stdout=True, workdir=self.outdir)]
        )

    @mock.patch('kobo.shortcuts.run')
    def test_bootable_run_ppc64(self, run):
        """ppc64 bootable ISO: HFS options, lorax mapping file, no isohybrid."""
        run.return_value = (0, '/usr/share/lorax')

        createiso.main([
            '--output-dir=%s' % self.outdir,
            '--iso-name=DP-1.0-20160405.t.3-ppc64.iso',
            '--volid=DP-1.0-20160405.t.3',
            '--graft-points=graft-list',
            '--arch=ppc64',
            '--buildinstall-method=lorax',
        ])

        self.maxDiff = None
        self.assertItemsEqual(
            run.call_args_list,
            [mock.call(['/usr/bin/genisoimage', '-untranslated-filenames',
                        '-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
                        '-rational-rock', '-translation-table',
                        '-x', './lost+found',
                        '-part', '-hfs', '-r', '-l', '-sysid', 'PPC', '-no-desktop',
                        '-allow-multidot', '-chrp-boot', '-map', '/usr/share/lorax/config_files/ppc/mapping',
                        '-hfs-bless', '/ppc/mac',
                        '-o', 'DP-1.0-20160405.t.3-ppc64.iso',
                        '-graft-points', '-path-list', 'graft-list'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call(['pungi-pylorax-find-templates', '/usr/share/lorax'],
                       show_cmd=True, stdout=True),
             mock.call(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-ppc64.iso'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call('isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest',
                       show_cmd=True, stdout=True, workdir=self.outdir)]
        )

    @mock.patch('kobo.shortcuts.run')
    def test_bootable_run_buildinstall(self, run):
        """buildinstall method uses the anaconda-runtime mapping file."""
        createiso.main([
            '--output-dir=%s' % self.outdir,
            '--iso-name=DP-1.0-20160405.t.3-ppc64.iso',
            '--volid=DP-1.0-20160405.t.3',
            '--graft-points=graft-list',
            '--arch=ppc64',
            '--buildinstall-method=buildinstall',
        ])

        self.maxDiff = None
        self.assertItemsEqual(
            run.call_args_list,
            [mock.call(['/usr/bin/genisoimage', '-untranslated-filenames',
                        '-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
                        '-rational-rock', '-translation-table',
                        '-x', './lost+found',
                        '-part', '-hfs', '-r', '-l', '-sysid', 'PPC', '-no-desktop',
                        '-allow-multidot', '-chrp-boot',
                        '-map', '/usr/lib/anaconda-runtime/boot/mapping',
                        '-hfs-bless', '/ppc/mac',
                        '-o', 'DP-1.0-20160405.t.3-ppc64.iso',
                        '-graft-points', '-path-list', 'graft-list'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-ppc64.iso'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call('isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest',
                       show_cmd=True, stdout=True, workdir=self.outdir)]
        )

    @mock.patch('sys.stderr')
    @mock.patch('kobo.shortcuts.run')
    def test_run_with_jigdo_bad_args(self, run, stderr):
        """--jigdo-dir without --os-tree is rejected with an exit."""
        with self.assertRaises(SystemExit):
            createiso.main([
                '--output-dir=%s' % self.outdir,
                '--iso-name=DP-1.0-20160405.t.3-x86_64.iso',
                '--volid=DP-1.0-20160405.t.3',
                '--graft-points=graft-list',
                '--arch=x86_64',
                '--jigdo-dir=%s/jigdo' % self.topdir,
            ])

    @mock.patch('kobo.shortcuts.run')
    def test_run_with_jigdo(self, run):
        """With jigdo options a jigdo-file make-template call is added."""
        createiso.main([
            '--output-dir=%s' % self.outdir,
            '--iso-name=DP-1.0-20160405.t.3-x86_64.iso',
            '--volid=DP-1.0-20160405.t.3',
            '--graft-points=graft-list',
            '--arch=x86_64',
            '--jigdo-dir=%s/jigdo' % self.topdir,
            '--os-tree=%s/os' % self.topdir,
        ])
        self.maxDiff = None
        self.assertItemsEqual(
            run.call_args_list,
            [mock.call(['/usr/bin/genisoimage', '-untranslated-filenames',
                        '-volid', 'DP-1.0-20160405.t.3', '-J', '-joliet-long',
                        '-rational-rock', '-translation-table',
                        '-input-charset', 'utf-8', '-x', './lost+found',
                        '-o', 'DP-1.0-20160405.t.3-x86_64.iso',
                        '-graft-points', '-path-list', 'graft-list'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call(['/usr/bin/implantisomd5', 'DP-1.0-20160405.t.3-x86_64.iso'],
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call('isoinfo -R -f -i DP-1.0-20160405.t.3-x86_64.iso | grep -v \'/TRANS.TBL$\' | sort >> DP-1.0-20160405.t.3-x86_64.iso.manifest',
                       show_cmd=True, stdout=True, workdir=self.outdir),
             mock.call(['jigdo-file', 'make-template', '--force',
                        '--image=%s/isos/DP-1.0-20160405.t.3-x86_64.iso' % self.topdir,
                        '--jigdo=%s/jigdo/DP-1.0-20160405.t.3-x86_64.iso.jigdo' % self.topdir,
                        '--template=%s/jigdo/DP-1.0-20160405.t.3-x86_64.iso.template' % self.topdir,
                        '--no-servers-section', '--report=noprogress', self.topdir + '/os//'],
                       show_cmd=True, stdout=True, workdir=self.outdir)]
        )

+ 

+ 

# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()

@@ -0,0 +1,76 @@

+ #!/usr/bin/env python2

+ # -*- coding: utf-8 -*-

+ 

+ try:

+     import unittest2 as unittest

+ except ImportError:

+     import unittest

+ 

+ import os

+ import sys

+ 

+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

+ 

+ from pungi.wrappers.createrepo import CreaterepoWrapper

+ 

+ 

class CreateRepoWrapperTest(unittest.TestCase):
    """Check the command lines produced by ``CreaterepoWrapper``.

    Covers both the ``createrepo_c`` and the classic ``createrepo``
    back-ends, each with default and fully-specified arguments.
    """

    # Keyword arguments shared by both "full" test cases below.
    _FULL_KWARGS = dict(
        baseurl='http://base.example.com', excludes=['abc', 'xyz'],
        pkglist='/test/pkglist', groupfile='/test/comps', cachedir='/test/cache',
        update=False, update_md_path='/test/md_path', skip_stat=True,
        checkts=True, split=True, pretty=False, database=False,
        checksum='sha256', unique_md_filenames=False, distro='Fedora',
        content=['c1', 'c2'], repo=['r1', 'r2'], revision='rev', deltas=True,
        oldpackagedirs='/test/old', num_deltas=2, workers=3,
        outputdir='/test/output',
    )

    # Expected options for the "full" invocations; identical for both
    # back-ends (order is irrelevant, compared with assertItemsEqual).
    _FULL_EXPECTED = [
        '--baseurl=http://base.example.com', '--excludes=abc', '--excludes=xyz',
        '--pkglist=/test/pkglist', '--groupfile=/test/comps', '--cachedir=/test/cache',
        '--skip-stat', '--update-md-path=/test/md_path', '--split', '--checkts',
        '--checksum=sha256', '--distro=Fedora', '--simple-md-filenames', '--no-database',
        '--content=c1', '--content=c2', '--repo=r1', '--repo=r2', '--revision=rev',
        '--deltas', '--oldpackagedirs=/test/old', '--num-deltas=2', '--workers=3',
        '--outputdir=/test/output', '--verbose',
    ]

    def test_get_createrepo_c_cmd_minimal(self):
        """createrepo_c with defaults enables only the basic options."""
        wrapper = CreaterepoWrapper()
        command = wrapper.get_createrepo_cmd('/test/dir')

        self.assertEqual(command[:2], ['createrepo_c', '/test/dir'])
        self.assertItemsEqual(
            command[2:],
            ['--verbose', '--update', '--database', '--unique-md-filenames'])

    def test_get_createrepo_c_cmd_full(self):
        """createrepo_c with every keyword argument supplied."""
        self.maxDiff = None
        wrapper = CreaterepoWrapper()
        command = wrapper.get_createrepo_cmd('/test/dir', **self._FULL_KWARGS)

        self.assertEqual(command[:2], ['createrepo_c', '/test/dir'])
        self.assertItemsEqual(command[2:], self._FULL_EXPECTED)

    def test_get_createrepo_cmd_minimal(self):
        """Classic createrepo with defaults enables only the basic options."""
        wrapper = CreaterepoWrapper(False)
        command = wrapper.get_createrepo_cmd('/test/dir')

        self.assertEqual(command[:2], ['createrepo', '/test/dir'])
        self.assertItemsEqual(
            command[2:],
            ['--update', '--database', '--unique-md-filenames',
             '--pretty', '--verbose'])

    def test_get_createrepo_cmd_full(self):
        """Classic createrepo with every keyword argument supplied."""
        self.maxDiff = None
        wrapper = CreaterepoWrapper(False)
        command = wrapper.get_createrepo_cmd('/test/dir', **self._FULL_KWARGS)

        self.assertEqual(command[:2], ['createrepo', '/test/dir'])
        self.assertItemsEqual(command[2:], self._FULL_EXPECTED)

@@ -0,0 +1,332 @@

+ #!/usr/bin/env python2

+ # -*- coding: utf-8 -*-

+ 

+ 

+ try:

+     import unittest2 as unittest

+ except ImportError:

+     import unittest

+ import mock

+ 

+ import os

+ import sys

+ 

+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

+ 

+ from pungi.phases.createrepo import CreaterepoPhase, create_variant_repo

+ from tests.helpers import DummyCompose, PungiTestCase, copy_fixture, touch

+ 

+ 

class TestCreaterepoPhase(PungiTestCase):
    """Tests for scheduling and validation of the createrepo phase."""

    @mock.patch('pungi.phases.createrepo.ThreadPool')
    def test_fails_deltas_without_old_compose(self, ThreadPoolCls):
        """Deltas need an old compose; validation must refuse otherwise."""
        compose = DummyCompose(self.topdir, {
            'createrepo_checksum': 'sha256',
            'createrepo_deltas': True,
        })

        phase = CreaterepoPhase(compose)
        with self.assertRaises(ValueError) as ctx:
            phase.validate()

        self.assertIn('deltas', str(ctx.exception))

    @mock.patch('pungi.phases.createrepo.ThreadPool')
    def test_starts_jobs(self, ThreadPoolCls):
        """A job is queued per variant/arch for rpm, debuginfo and srpm."""
        compose = DummyCompose(self.topdir, {})

        pool = ThreadPoolCls.return_value

        phase = CreaterepoPhase(compose)
        phase.run()
        self.maxDiff = None

        self.assertEqual(len(pool.add.mock_calls), 3)
        self.assertItemsEqual(
            pool.queue_put.mock_calls,
            [mock.call((compose, 'x86_64', compose.variants['Server'], 'rpm')),
             mock.call((compose, 'x86_64', compose.variants['Server'], 'debuginfo')),
             mock.call((compose, 'amd64', compose.variants['Server'], 'rpm')),
             mock.call((compose, 'amd64', compose.variants['Server'], 'debuginfo')),
             mock.call((compose, None, compose.variants['Server'], 'srpm')),
             mock.call((compose, 'x86_64', compose.variants['Everything'], 'rpm')),
             mock.call((compose, 'x86_64', compose.variants['Everything'], 'debuginfo')),
             mock.call((compose, 'amd64', compose.variants['Everything'], 'rpm')),
             mock.call((compose, 'amd64', compose.variants['Everything'], 'debuginfo')),
             mock.call((compose, None, compose.variants['Everything'], 'srpm')),
             mock.call((compose, 'amd64', compose.variants['Client'], 'rpm')),
             mock.call((compose, 'amd64', compose.variants['Client'], 'debuginfo')),
             mock.call((compose, None, compose.variants['Client'], 'srpm'))])

    @mock.patch('pungi.phases.createrepo.ThreadPool')
    def test_skips_empty_variants(self, ThreadPoolCls):
        """Variants flagged as empty do not get any createrepo jobs."""
        compose = DummyCompose(self.topdir, {})
        compose.variants['Client'].is_empty = True

        pool = ThreadPoolCls.return_value

        phase = CreaterepoPhase(compose)
        phase.run()
        self.maxDiff = None

        self.assertEqual(len(pool.add.mock_calls), 3)
        self.assertItemsEqual(
            pool.queue_put.mock_calls,
            [mock.call((compose, 'x86_64', compose.variants['Server'], 'rpm')),
             mock.call((compose, 'x86_64', compose.variants['Server'], 'debuginfo')),
             mock.call((compose, 'amd64', compose.variants['Server'], 'rpm')),
             mock.call((compose, 'amd64', compose.variants['Server'], 'debuginfo')),
             mock.call((compose, None, compose.variants['Server'], 'srpm')),
             mock.call((compose, 'x86_64', compose.variants['Everything'], 'rpm')),
             mock.call((compose, 'x86_64', compose.variants['Everything'], 'debuginfo')),
             mock.call((compose, 'amd64', compose.variants['Everything'], 'rpm')),
             mock.call((compose, 'amd64', compose.variants['Everything'], 'debuginfo')),
             mock.call((compose, None, compose.variants['Everything'], 'srpm'))])

+ 

+ 

class TestCreateRepoThread(PungiTestCase):
    """Tests for ``create_variant_repo`` with mocked-out external tools.

    Fixes: the idempotence test name was misspelled (``idepotent``), and
    the identical five-line compose setup was repeated in every test; it
    now lives in the ``_make_compose`` / ``_enable_old_compose`` helpers.
    """

    def _make_compose(self, extra_config=None):
        # Build a dummy compose without comps and with the standard
        # server-rpms.json metadata fixture in place.
        config = {'createrepo_checksum': 'sha256'}
        config.update(extra_config or {})
        compose = DummyCompose(self.topdir, config)
        compose.DEBUG = False
        compose.has_comps = False
        copy_fixture('server-rpms.json', compose.paths.compose.metadata('rpms.json'))
        return compose

    def _enable_old_compose(self, compose):
        # Point the compose at a finished old compose so that deltas can
        # be computed against its packages.
        compose.old_composes = [self.topdir + '/old']
        touch(os.path.join(self.topdir, 'old', 'test-1.0-20151203.0', 'STATUS'), 'FINISHED')

    @mock.patch('pungi.phases.createrepo.run')
    @mock.patch('pungi.phases.createrepo.CreaterepoWrapper')
    def test_variant_repo_rpms(self, CreaterepoWrapperCls, run):
        """Binary repo for a variant lists only its binary packages."""
        compose = self._make_compose()
        repo = CreaterepoWrapperCls.return_value

        create_variant_repo(compose, 'x86_64', compose.variants['Server'], 'rpm')

        list_file = self.topdir + '/work/x86_64/repo_package_list/Server.x86_64.rpm.conf'
        self.assertEqual(CreaterepoWrapperCls.mock_calls[0],
                         mock.call(createrepo_c=True))
        self.assertItemsEqual(
            repo.get_createrepo_cmd.mock_calls,
            [mock.call(self.topdir + '/compose/Server/x86_64/os', checksum='sha256',
                       database=True, groupfile=None, workers=3,
                       outputdir=self.topdir + '/compose/Server/x86_64/os',
                       pkglist=list_file, skip_stat=True, update=True,
                       update_md_path=self.topdir + '/work/x86_64/repo',
                       deltas=False, oldpackagedirs=None)])
        with open(list_file) as f:
            self.assertEqual(f.read(), 'Packages/b/bash-4.3.30-2.fc21.x86_64.rpm\n')

    @mock.patch('pungi.phases.createrepo.run')
    @mock.patch('pungi.phases.createrepo.CreaterepoWrapper')
    def test_variant_repo_source(self, CreaterepoWrapperCls, run):
        """Source repo (arch None) lists SRPMs relative to the tree."""
        compose = self._make_compose()
        repo = CreaterepoWrapperCls.return_value

        create_variant_repo(compose, None, compose.variants['Server'], 'srpm')

        list_file = self.topdir + '/work/global/repo_package_list/Server.None.srpm.conf'
        self.assertEqual(CreaterepoWrapperCls.mock_calls[0],
                         mock.call(createrepo_c=True))
        self.assertItemsEqual(
            repo.get_createrepo_cmd.mock_calls,
            [mock.call(self.topdir + '/compose/Server/source/tree', checksum='sha256',
                       database=True, groupfile=None, workers=3,
                       outputdir=self.topdir + '/compose/Server/source/tree',
                       pkglist=list_file, skip_stat=True, update=True,
                       update_md_path=self.topdir + '/work/global/repo',
                       deltas=False, oldpackagedirs=None)])
        with open(list_file) as f:
            self.assertItemsEqual(
                f.read().strip().split('\n'),
                ['../SRPMS/b/bash-4.3.30-2.fc21.src.rpm'])

    @mock.patch('pungi.phases.createrepo.run')
    @mock.patch('pungi.phases.createrepo.CreaterepoWrapper')
    def test_variant_repo_debug(self, CreaterepoWrapperCls, run):
        """Debuginfo repo lists only debuginfo packages."""
        compose = self._make_compose()
        repo = CreaterepoWrapperCls.return_value

        create_variant_repo(compose, 'x86_64', compose.variants['Server'], 'debuginfo')
        self.maxDiff = None

        list_file = self.topdir + '/work/x86_64/repo_package_list/Server.x86_64.debuginfo.conf'
        self.assertEqual(CreaterepoWrapperCls.mock_calls[0],
                         mock.call(createrepo_c=True))
        self.assertItemsEqual(
            repo.get_createrepo_cmd.mock_calls,
            [mock.call(self.topdir + '/compose/Server/x86_64/debug/tree', checksum='sha256',
                       database=True, groupfile=None, workers=3,
                       outputdir=self.topdir + '/compose/Server/x86_64/debug/tree',
                       pkglist=list_file, skip_stat=True, update=True,
                       update_md_path=self.topdir + '/work/x86_64/repo',
                       deltas=False, oldpackagedirs=None)])
        with open(list_file) as f:
            self.assertEqual(f.read(), 'Packages/b/bash-debuginfo-4.3.30-2.fc21.x86_64.rpm\n')

    @mock.patch('pungi.phases.createrepo.run')
    @mock.patch('pungi.phases.createrepo.CreaterepoWrapper')
    def test_variant_repo_no_createrepo_c(self, CreaterepoWrapperCls, run):
        """Disabling createrepo_c is passed through to the wrapper."""
        compose = self._make_compose({'createrepo_c': False})
        repo = CreaterepoWrapperCls.return_value

        create_variant_repo(compose, 'x86_64', compose.variants['Server'], 'rpm')

        list_file = self.topdir + '/work/x86_64/repo_package_list/Server.x86_64.rpm.conf'
        self.assertEqual(CreaterepoWrapperCls.mock_calls[0],
                         mock.call(createrepo_c=False))
        self.assertItemsEqual(
            repo.get_createrepo_cmd.mock_calls,
            [mock.call(self.topdir + '/compose/Server/x86_64/os', checksum='sha256',
                       database=True, groupfile=None, workers=3,
                       outputdir=self.topdir + '/compose/Server/x86_64/os',
                       pkglist=list_file, skip_stat=True, update=True,
                       update_md_path=self.topdir + '/work/x86_64/repo',
                       deltas=False, oldpackagedirs=None)])
        with open(list_file) as f:
            self.assertEqual(f.read(), 'Packages/b/bash-4.3.30-2.fc21.x86_64.rpm\n')

    @mock.patch('pungi.phases.createrepo.run')
    @mock.patch('pungi.phases.createrepo.CreaterepoWrapper')
    def test_variant_repo_is_idempotent(self, CreaterepoWrapperCls, run):
        """Repeating the call does not create the repo a second time."""
        compose = self._make_compose()
        repo = CreaterepoWrapperCls.return_value

        # Running the same thing twice only creates repo once.
        create_variant_repo(compose, 'x86_64', compose.variants['Server'], 'rpm')
        create_variant_repo(compose, 'x86_64', compose.variants['Server'], 'rpm')

        list_file = self.topdir + '/work/x86_64/repo_package_list/Server.x86_64.rpm.conf'
        self.assertEqual(CreaterepoWrapperCls.mock_calls[0],
                         mock.call(createrepo_c=True))
        self.assertItemsEqual(
            repo.get_createrepo_cmd.mock_calls,
            [mock.call(self.topdir + '/compose/Server/x86_64/os', checksum='sha256',
                       database=True, groupfile=None, workers=3,
                       outputdir=self.topdir + '/compose/Server/x86_64/os',
                       pkglist=list_file, skip_stat=True, update=True,
                       update_md_path=self.topdir + '/work/x86_64/repo',
                       deltas=False, oldpackagedirs=None)])
        with open(list_file) as f:
            self.assertEqual(f.read(), 'Packages/b/bash-4.3.30-2.fc21.x86_64.rpm\n')

    @mock.patch('pungi.phases.createrepo.run')
    @mock.patch('pungi.phases.createrepo.CreaterepoWrapper')
    def test_variant_repo_rpms_with_deltas(self, CreaterepoWrapperCls, run):
        """Deltas enabled: old compose binary dir is passed along."""
        compose = self._make_compose({'createrepo_deltas': True})
        self._enable_old_compose(compose)
        repo = CreaterepoWrapperCls.return_value

        create_variant_repo(compose, 'x86_64', compose.variants['Server'], 'rpm')

        list_file = self.topdir + '/work/x86_64/repo_package_list/Server.x86_64.rpm.conf'
        self.assertEqual(CreaterepoWrapperCls.mock_calls[0],
                         mock.call(createrepo_c=True))
        self.assertItemsEqual(
            repo.get_createrepo_cmd.mock_calls,
            [mock.call(self.topdir + '/compose/Server/x86_64/os', checksum='sha256',
                       database=True, groupfile=None, workers=3,
                       outputdir=self.topdir + '/compose/Server/x86_64/os',
                       pkglist=list_file, skip_stat=True, update=True,
                       update_md_path=self.topdir + '/work/x86_64/repo', deltas=True,
                       oldpackagedirs=self.topdir + '/old/test-1.0-20151203.0/compose/Server/x86_64/os')])
        with open(list_file) as f:
            self.assertEqual(f.read(), 'Packages/b/bash-4.3.30-2.fc21.x86_64.rpm\n')

    @mock.patch('pungi.phases.createrepo.run')
    @mock.patch('pungi.phases.createrepo.CreaterepoWrapper')
    def test_variant_repo_source_with_deltas(self, CreaterepoWrapperCls, run):
        """Deltas enabled for the source repo: old source tree is used."""
        compose = self._make_compose({'createrepo_deltas': True})
        self._enable_old_compose(compose)
        repo = CreaterepoWrapperCls.return_value

        create_variant_repo(compose, None, compose.variants['Server'], 'srpm')

        list_file = self.topdir + '/work/global/repo_package_list/Server.None.srpm.conf'
        self.assertEqual(CreaterepoWrapperCls.mock_calls[0],
                         mock.call(createrepo_c=True))
        self.assertItemsEqual(
            repo.get_createrepo_cmd.mock_calls,
            [mock.call(self.topdir + '/compose/Server/source/tree', checksum='sha256',
                       database=True, groupfile=None, workers=3,
                       outputdir=self.topdir + '/compose/Server/source/tree',
                       pkglist=list_file, skip_stat=True, update=True,
                       update_md_path=self.topdir + '/work/global/repo', deltas=True,
                       oldpackagedirs=self.topdir + '/old/test-1.0-20151203.0/compose/Server/source/tree')])
        with open(list_file) as f:
            self.assertItemsEqual(
                f.read().strip().split('\n'),
                ['../SRPMS/b/bash-4.3.30-2.fc21.src.rpm'])

    @mock.patch('pungi.phases.createrepo.run')
    @mock.patch('pungi.phases.createrepo.CreaterepoWrapper')
    def test_variant_repo_debug_with_deltas(self, CreaterepoWrapperCls, run):
        """Deltas enabled for debuginfo: old debug tree is used."""
        compose = self._make_compose({'createrepo_deltas': True})
        self._enable_old_compose(compose)
        repo = CreaterepoWrapperCls.return_value

        create_variant_repo(compose, 'x86_64', compose.variants['Server'], 'debuginfo')

        list_file = self.topdir + '/work/x86_64/repo_package_list/Server.x86_64.debuginfo.conf'
        self.assertEqual(CreaterepoWrapperCls.mock_calls[0],
                         mock.call(createrepo_c=True))
        self.assertItemsEqual(
            repo.get_createrepo_cmd.mock_calls,
            [mock.call(self.topdir + '/compose/Server/x86_64/debug/tree', checksum='sha256',
                       database=True, groupfile=None, workers=3,
                       outputdir=self.topdir + '/compose/Server/x86_64/debug/tree',
                       pkglist=list_file, skip_stat=True, update=True,
                       update_md_path=self.topdir + '/work/x86_64/repo', deltas=True,
                       oldpackagedirs=self.topdir + '/old/test-1.0-20151203.0/compose/Server/x86_64/debug/tree')])
        with open(list_file) as f:
            self.assertEqual(f.read(), 'Packages/b/bash-debuginfo-4.3.30-2.fc21.x86_64.rpm\n')

+ 

+ 

# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()

file modified
+152 -30
@@ -2,7 +2,10 @@

  # -*- coding: utf-8 -*-

  

  

- import unittest

+ try:

+     import unittest2 as unittest

+ except ImportError:

+     import unittest

  import mock

  

  import os
@@ -11,7 +14,7 @@

  sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

  

  from pungi.phases.image_build import ImageBuildPhase, CreateImageBuildThread

- from tests.helpers import DummyCompose, PungiTestCase

+ from tests.helpers import DummyCompose, PungiTestCase, boom

  

  

  class TestImageBuildPhase(PungiTestCase):
@@ -251,6 +254,66 @@

          })

  

      @mock.patch('pungi.phases.image_build.ThreadPool')

+     def test_image_build_set_external_install_tree(self, ThreadPool):

+         compose = DummyCompose(self.topdir, {

+             'image_build': {

+                 '^Server$': [

+                     {

+                         'image-build': {

+                             'format': [('docker', 'tar.xz')],

+                             'name': 'Fedora-Docker-Base',

+                             'target': 'f24',

+                             'version': 'Rawhide',

+                             'ksurl': 'git://git.fedorahosted.org/git/spin-kickstarts.git',

+                             'kickstart': "fedora-docker-base.ks",

+                             'distro': 'Fedora-20',

+                             'disk_size': 3,

+                             'arches': ['x86_64'],

+                             'install_tree_from': 'http://example.com/install-tree/',

+                         }

+                     }

+                 ]

+             },

+             'koji_profile': 'koji',

+         })

+ 

+         phase = ImageBuildPhase(compose)

+ 

+         phase.run()

+ 

+         # assert at least one thread was started

+         self.assertTrue(phase.pool.add.called)

+ 

+         self.assertTrue(phase.pool.queue_put.called_once)

+         args, kwargs = phase.pool.queue_put.call_args

+         self.assertEqual(args[0][0], compose)

+         self.maxDiff = None

+         self.assertDictEqual(args[0][1], {

+             "format": [('docker', 'tar.xz')],

+             "image_conf": {

+                 'image-build': {

+                     'install_tree': 'http://example.com/install-tree/',

+                     'kickstart': 'fedora-docker-base.ks',

+                     'format': 'docker',

+                     'repo': ','.join([self.topdir + '/compose/Server/$arch/os']),

+                     'variant': compose.variants['Server'],

+                     'target': 'f24',

+                     'disk_size': 3,

+                     'name': 'Fedora-Docker-Base',

+                     'arches': 'x86_64',

+                     'version': 'Rawhide',

+                     'ksurl': 'git://git.fedorahosted.org/git/spin-kickstarts.git',

+                     'distro': 'Fedora-20',

+                 }

+             },

+             "conf_file": self.topdir + '/work/image-build/Server/docker_Fedora-Docker-Base.cfg',

+             "image_dir": self.topdir + '/compose/Server/%(arch)s/images',

+             "relative_image_dir": 'Server/%(arch)s/images',

+             "link_type": 'hardlink-or-copy',

+             "scratch": False,

+         })

+ 

+     @mock.patch('pungi.phases.image_build.ThreadPool')

      def test_image_build_create_release(self, ThreadPool):

          compose = DummyCompose(self.topdir, {

              'image_build': {
@@ -284,7 +347,7 @@

          self.assertTrue(phase.pool.queue_put.called_once)

          args, kwargs = phase.pool.queue_put.call_args

          self.assertEqual(args[0][1].get('image_conf', {}).get('image-build', {}).get('release'),

-                          '20151203.0')

+                          '20151203.t.0')

  

      @mock.patch('pungi.phases.image_build.ThreadPool')

      def test_image_build_scratch_build(self, ThreadPool):
@@ -321,6 +384,45 @@

          args, kwargs = phase.pool.queue_put.call_args

          self.assertTrue(args[0][1].get('scratch'))

  

+     @mock.patch('pungi.phases.image_build.resolve_git_url')

+     @mock.patch('pungi.phases.image_build.ThreadPool')

+     def test_image_build_resolve_ksurl(self, ThreadPool, resolve_git_url):

+         compose = DummyCompose(self.topdir, {

+             'image_build': {

+                 '^Server$': [

+                     {

+                         'image-build': {

+                             'format': [('docker', 'tar.xz')],

+                             'name': 'Fedora-Docker-Base',

+                             'target': 'f24',

+                             'version': 'Rawhide',

+                             'ksurl': 'git://git.fedorahosted.org/git/spin-kickstarts.git?#HEAD',

+                             'kickstart': "fedora-docker-base.ks",

+                             'distro': 'Fedora-20',

+                             'disk_size': 3,

+                             'arches': ['x86_64'],

+                             'scratch': True,

+                         }

+                     }

+                 ]

+             },

+             'koji_profile': 'koji',

+         })

+ 

+         resolve_git_url.return_value = 'git://git.fedorahosted.org/git/spin-kickstarts.git?#BEEFCAFE'

+ 

+         phase = ImageBuildPhase(compose)

+ 

+         phase.run()

+ 

+         # assert at least one thread was started

+         self.assertTrue(phase.pool.add.called)

+ 

+         self.assertTrue(phase.pool.queue_put.called_once)

+         args, kwargs = phase.pool.queue_put.call_args

+         self.assertEqual(args[0][1]['image_conf'].get('image-build', {}).get('ksurl'),

+                          resolve_git_url.return_value)

+ 

  

  class TestCreateImageBuildThread(PungiTestCase):

  
@@ -349,6 +451,7 @@

                      'version': 'Rawhide',

                      'ksurl': 'git://git.fedorahosted.org/git/spin-kickstarts.git',

                      'distro': 'Fedora-20',

+                     'subvariant': 'KDE',

                  }

              },

              "conf_file": 'amd64,x86_64-Client-Fedora-Docker-Base-docker',
@@ -385,19 +488,36 @@

              t.process((compose, cmd), 1)

  

          self.assertItemsEqual(

+             koji_wrapper.get_image_build_cmd.call_args_list,

+             [mock.call(cmd['image_conf'],

+                        conf_file_dest='amd64,x86_64-Client-Fedora-Docker-Base-docker',

+                        scratch=False)]

+         )

+ 

+         self.assertItemsEqual(

+             koji_wrapper.run_blocking_cmd.call_args_list,

+             [mock.call(koji_wrapper.get_image_build_cmd.return_value,

+                        log_file=self.topdir + '/logs/amd64-x86_64/imagebuild-Client-KDE-docker.amd64-x86_64.log')]

+         )

+ 

+         self.assertItemsEqual(

              linker.mock_calls,

-             [mock.call('/koji/task/1235/Fedora-Docker-Base-20160103.amd64.qcow2',

-                        self.topdir + '/compose/Client/amd64/images/Fedora-Docker-Base-20160103.amd64.qcow2',

-                        link_type='hardlink-or-copy'),

-              mock.call('/koji/task/1235/Fedora-Docker-Base-20160103.amd64.tar.xz',

-                        self.topdir + '/compose/Client/amd64/images/Fedora-Docker-Base-20160103.amd64.tar.xz',

-                        link_type='hardlink-or-copy'),

-              mock.call('/koji/task/1235/Fedora-Docker-Base-20160103.x86_64.qcow2',

-                        self.topdir + '/compose/Client/x86_64/images/Fedora-Docker-Base-20160103.x86_64.qcow2',

-                        link_type='hardlink-or-copy'),

-              mock.call('/koji/task/1235/Fedora-Docker-Base-20160103.x86_64.tar.xz',

-                        self.topdir + '/compose/Client/x86_64/images/Fedora-Docker-Base-20160103.x86_64.tar.xz',

-                        link_type='hardlink-or-copy')])

+             [mock.call.link(

+                 '/koji/task/1235/Fedora-Docker-Base-20160103.amd64.qcow2',

+                 self.topdir + '/compose/Client/amd64/images/Fedora-Docker-Base-20160103.amd64.qcow2',

+                 link_type='hardlink-or-copy'),

+              mock.call.link(

+                  '/koji/task/1235/Fedora-Docker-Base-20160103.amd64.tar.xz',

+                  self.topdir + '/compose/Client/amd64/images/Fedora-Docker-Base-20160103.amd64.tar.xz',

+                  link_type='hardlink-or-copy'),

+              mock.call.link(

+                  '/koji/task/1235/Fedora-Docker-Base-20160103.x86_64.qcow2',

+                  self.topdir + '/compose/Client/x86_64/images/Fedora-Docker-Base-20160103.x86_64.qcow2',

+                  link_type='hardlink-or-copy'),

+              mock.call.link(

+                  '/koji/task/1235/Fedora-Docker-Base-20160103.x86_64.tar.xz',

+                  self.topdir + '/compose/Client/x86_64/images/Fedora-Docker-Base-20160103.x86_64.tar.xz',

+                  link_type='hardlink-or-copy')])

  

          image_relative_paths = {

              'image_dir/Client/amd64/Fedora-Docker-Base-20160103.amd64.qcow2': {
@@ -434,6 +554,7 @@

              data = image_relative_paths.pop(image.path)

              self.assertEqual(data['format'], image.format)

              self.assertEqual(data['type'], image.type)

+             self.assertEqual('KDE', image.subvariant)

  

          self.assertTrue(os.path.isdir(self.topdir + '/compose/Client/amd64/images'))

          self.assertTrue(os.path.isdir(self.topdir + '/compose/Client/x86_64/images'))
@@ -482,12 +603,15 @@

          }

  

          t = CreateImageBuildThread(pool)

-         with mock.patch('os.stat') as stat:

-             with mock.patch('os.path.getsize') as getsize:

-                 with mock.patch('time.sleep'):

-                     getsize.return_value = 1024

-                     stat.return_value.st_mtime = 13579

-                     t.process((compose, cmd), 1)

+         with mock.patch('time.sleep'):

+             t.process((compose, cmd), 1)

+ 

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Image-build (variant Client, arch *) failed, but going on anyway.'),

+             mock.call('ImageBuild task failed: 1234. See %s for more details.'

+                       % (os.path.join(self.topdir,

+                                       'logs/amd64-x86_64/imagebuild-Client-Client-docker.amd64-x86_64.log'))),

+         ])

  

      @mock.patch('pungi.phases.image_build.KojiWrapper')

      @mock.patch('pungi.phases.image_build.Linker')
@@ -526,19 +650,17 @@

              'scratch': False,

          }

  

-         def boom(*args, **kwargs):

-             raise RuntimeError('BOOM')

- 

          koji_wrapper = KojiWrapper.return_value

          koji_wrapper.run_blocking_cmd.side_effect = boom

  

          t = CreateImageBuildThread(pool)

-         with mock.patch('os.stat') as stat:

-             with mock.patch('os.path.getsize') as getsize:

-                 with mock.patch('time.sleep'):

-                     getsize.return_value = 1024

-                     stat.return_value.st_mtime = 13579

-                     t.process((compose, cmd), 1)

+         with mock.patch('time.sleep'):

+             t.process((compose, cmd), 1)

+ 

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Image-build (variant Client, arch *) failed, but going on anyway.'),

+             mock.call('BOOM'),

+         ])

  

  

  if __name__ == "__main__":

@@ -1,8 +1,10 @@

  #!/usr/bin/env python

  # -*- coding: utf-8 -*-

  

- 

- import unittest

+ try:

+     import unittest2 as unittest

+ except ImportError:

+     import unittest

  import mock

  

  import os
@@ -86,8 +88,9 @@

          compose = DummyCompose(self.topdir, {

              'media_checksums': ['sha256'],

              'media_checksum_one_file': True,

-             'media_checksum_base_filename': '%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s'

+             'media_checksum_base_filename': '%(release_short)s-%(variant)s-%(version)s-%(date)s%(type_suffix)s.%(respin)s_%(label)s'

          })

+         compose.compose_label = 'Alpha-1.0'

  

          phase = ImageChecksumPhase(compose)

  
@@ -98,7 +101,7 @@

  

          dump.assert_called_once_with(self.topdir + '/compose/Client/i386/iso', 'sha256',

                                       {'image.iso': 'cafebabe'},

-                                      'test-Client-1.0-20151203.t.0-CHECKSUM')

+                                      'test-Client-1.0-20151203.t.0_Alpha-1.0-CHECKSUM')

          cc.assert_called_once_with(self.topdir + '/compose/Client/i386/iso/image.iso', ['sha256'])

          compose.image.add_checksum.assert_called_once_with(None, 'sha256', 'cafebabe')

  

@@ -0,0 +1,268 @@

+ #!/usr/bin/env python

+ # -*- coding: utf-8 -*-

+ 

+ 

+ import unittest

+ import mock

+ 

+ import os

+ import sys

+ 

+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

+ 

+ from pungi.phases import init

+ from tests.helpers import DummyCompose, PungiTestCase, touch, union

+ 

+ MIN_CONFIG = {

+     'release_short': 'Fedora',

+     'release_name': 'Fedora',

+     'release_version': 'Rawhide',

+     'release_is_layered': False,

+     'variants_file': 'does-not-exist.xml',

+     'sigkeys': [],

+     'createrepo_checksum': 'sha256',

+     'runroot': False,

+ }

+ 

+ 

+ class TestInitPhase(PungiTestCase):

+ 

+     @mock.patch('pungi.phases.init.write_global_comps')

+     @mock.patch('pungi.phases.init.write_arch_comps')

+     @mock.patch('pungi.phases.init.create_comps_repo')

+     @mock.patch('pungi.phases.init.write_variant_comps')

+     @mock.patch('pungi.phases.init.write_prepopulate_file')

+     def test_run(self, write_prepopulate, write_variant, create_comps, write_arch, write_global):

+         compose = DummyCompose(self.topdir, {})

+         compose.has_comps = True

+         phase = init.InitPhase(compose)

+         phase.run()

+ 

+         self.assertEqual(write_global.mock_calls, [mock.call(compose)])

+         self.assertEqual(write_prepopulate.mock_calls, [mock.call(compose)])

+         self.assertItemsEqual(write_arch.mock_calls,

+                               [mock.call(compose, 'x86_64'), mock.call(compose, 'amd64')])

+         self.assertItemsEqual(create_comps.mock_calls,

+                               [mock.call(compose, 'x86_64'), mock.call(compose, 'amd64')])

+         self.assertItemsEqual(write_variant.mock_calls,

+                               [mock.call(compose, 'x86_64', compose.variants['Server']),

+                                mock.call(compose, 'amd64', compose.variants['Server']),

+                                mock.call(compose, 'amd64', compose.variants['Client']),

+                                mock.call(compose, 'x86_64', compose.variants['Everything']),

+                                mock.call(compose, 'amd64', compose.variants['Everything'])])

+ 

+     @mock.patch('pungi.phases.init.copy_variant_comps')

+     @mock.patch('pungi.phases.init.write_global_comps')

+     @mock.patch('pungi.phases.init.write_arch_comps')

+     @mock.patch('pungi.phases.init.create_comps_repo')

+     @mock.patch('pungi.phases.init.write_variant_comps')

+     @mock.patch('pungi.phases.init.write_prepopulate_file')

+     def test_run_with_preserve(self, write_prepopulate, write_variant, create_comps,

+                                write_arch, write_global, copy_comps):

+         compose = DummyCompose(self.topdir, {

+             'keep_original_comps': ['Everything'],

+         })

+         compose.has_comps = True

+         phase = init.InitPhase(compose)

+         phase.run()

+ 

+         self.assertEqual(write_global.mock_calls, [mock.call(compose)])

+         self.assertEqual(write_prepopulate.mock_calls, [mock.call(compose)])

+         self.assertItemsEqual(write_arch.mock_calls,

+                               [mock.call(compose, 'x86_64'), mock.call(compose, 'amd64')])

+         self.assertItemsEqual(create_comps.mock_calls,

+                               [mock.call(compose, 'x86_64'), mock.call(compose, 'amd64')])

+         self.assertItemsEqual(write_variant.mock_calls,

+                               [mock.call(compose, 'x86_64', compose.variants['Server']),

+                                mock.call(compose, 'amd64', compose.variants['Server']),

+                                mock.call(compose, 'amd64', compose.variants['Client'])])

+         self.assertItemsEqual(copy_comps.mock_calls,

+                               [mock.call(compose, 'x86_64', compose.variants['Everything']),

+                                mock.call(compose, 'amd64', compose.variants['Everything'])])

+ 

+     @mock.patch('pungi.phases.init.copy_variant_comps')

+     @mock.patch('pungi.phases.init.write_global_comps')

+     @mock.patch('pungi.phases.init.write_arch_comps')

+     @mock.patch('pungi.phases.init.create_comps_repo')

+     @mock.patch('pungi.phases.init.write_variant_comps')

+     @mock.patch('pungi.phases.init.write_prepopulate_file')

+     def test_run_without_comps(self, write_prepopulate, write_variant, create_comps,

+                                write_arch, write_global, copy_comps):

+         compose = DummyCompose(self.topdir, {})

+         compose.has_comps = False

+         phase = init.InitPhase(compose)

+         phase.run()

+ 

+         self.assertEqual(write_global.mock_calls, [])

+         self.assertEqual(write_prepopulate.mock_calls, [mock.call(compose)])

+         self.assertItemsEqual(write_arch.mock_calls, [])

+         self.assertItemsEqual(create_comps.mock_calls, [])

+         self.assertItemsEqual(write_variant.mock_calls, [])

+         self.assertItemsEqual(copy_comps.mock_calls, [])

+ 

+     def test_validate_keep_original_comps_missing(self):

+         compose = DummyCompose(self.topdir, MIN_CONFIG)

+         phase = init.InitPhase(compose)

+         phase.validate()

+ 

+     def test_validate_keep_original_comps_empty(self):

+         config = union(MIN_CONFIG, {'keep_original_comps': []})

+         compose = DummyCompose(self.topdir, config)

+         phase = init.InitPhase(compose)

+         phase.validate()

+ 

+     def test_validate_keep_original_comps_filled_in(self):

+         config = union(MIN_CONFIG, {'keep_original_comps': ['Everything']})

+         compose = DummyCompose(self.topdir, config)

+         phase = init.InitPhase(compose)

+         phase.validate()

+ 

+ 

+ class TestWriteArchComps(PungiTestCase):

+ 

+     @mock.patch('pungi.phases.init.run')

+     def test_run(self, run):

+         compose = DummyCompose(self.topdir, {})

+         compose.DEBUG = False

+ 

+         init.write_arch_comps(compose, 'x86_64')

+ 

+         self.assertEqual(run.mock_calls,

+                          [mock.call(['comps_filter', '--arch=x86_64', '--no-cleanup',

+                                      '--output=%s/work/x86_64/comps/comps-x86_64.xml' % self.topdir,

+                                      self.topdir + '/work/global/comps/comps-global.xml'])])

+ 

+     @mock.patch('pungi.phases.init.run')

+     def test_run_in_debug(self, run):

+         compose = DummyCompose(self.topdir, {})

+         compose.DEBUG = True

+         touch(self.topdir + '/work/x86_64/comps/comps-x86_64.xml')

+ 

+         init.write_arch_comps(compose, 'x86_64')

+ 

+         self.assertEqual(run.mock_calls, [])

+ 

+ 

+ class TestCreateCompsRepo(PungiTestCase):

+ 

+     @mock.patch('pungi.phases.init.run')

+     def test_run(self, run):

+         compose = DummyCompose(self.topdir, {

+             'createrepo_checksum': 'sha256',

+         })

+         compose.DEBUG = False

+ 

+         init.create_comps_repo(compose, 'x86_64')

+ 

+         self.assertEqual(run.mock_calls,

+                          [mock.call(['createrepo_c', self.topdir + '/work/x86_64/comps_repo', '--verbose',

+                                      '--outputdir=%s/work/x86_64/comps_repo' % self.topdir,

+                                      '--groupfile=%s/work/x86_64/comps/comps-x86_64.xml' % self.topdir,

+                                      '--update', '--skip-stat', '--database', '--checksum=sha256',

+                                      '--unique-md-filenames'],

+                                     logfile=self.topdir + '/logs/x86_64/comps_repo.x86_64.log',

+                                     show_cmd=True)])

+ 

+     @mock.patch('pungi.phases.init.run')

+     def test_run_in_debug(self, run):

+         compose = DummyCompose(self.topdir, {

+             'createrepo_checksum': 'sha256',

+         })

+         compose.DEBUG = True

+         os.makedirs(self.topdir + '/work/x86_64/comps_repo/repodata')

+ 

+         init.create_comps_repo(compose, 'x86_64')

+ 

+         self.assertEqual(run.mock_calls, [])

+ 

+ 

+ class TestWriteGlobalComps(PungiTestCase):

+ 

+     @mock.patch('shutil.copy2')

+     @mock.patch('pungi.phases.init.get_file_from_scm')

+     def test_run_in_debug(self, get_file, copy2):

+         compose = DummyCompose(self.topdir, {'comps_file': 'some-file.xml'})

+         compose.DEBUG = True

+         touch(self.topdir + '/work/global/comps/comps-global.xml')

+ 

+         init.write_global_comps(compose)

+ 

+         self.assertEqual(get_file.mock_calls, [])

+         self.assertEqual(copy2.mock_calls, [])

+ 

+     @mock.patch('pungi.phases.init.get_file_from_scm')

+     def test_run_local_file(self, get_file):

+         compose = DummyCompose(self.topdir, {'comps_file': 'some-file.xml'})

+         compose.DEBUG = False

+ 

+         def gen_file(src, dest, logger=None):

+             self.assertEqual(src, '/home/releng/config/some-file.xml')

+             touch(os.path.join(dest, 'some-file.xml'))

+ 

+         get_file.side_effect = gen_file

+ 

+         init.write_global_comps(compose)

+ 

+         self.assertTrue(os.path.isfile(self.topdir + '/work/global/comps/comps-global.xml'))

+ 

+ 

+ class TestWriteVariantComps(PungiTestCase):

+ 

+     @mock.patch('pungi.phases.init.run')

+     @mock.patch('pungi.phases.init.CompsWrapper')

+     def test_run(self, CompsWrapper, run):

+         compose = DummyCompose(self.topdir, {})

+         compose.DEBUG = False

+         variant = compose.variants['Server']

+ 

+         init.write_variant_comps(compose, 'x86_64', variant)

+ 

+         self.assertEqual(run.mock_calls,

+                          [mock.call(['comps_filter', '--arch=x86_64', '--keep-empty-group=conflicts',

+                                      '--keep-empty-group=conflicts-server',

+                                      '--output=%s/work/x86_64/comps/comps-Server.x86_64.xml' % self.topdir,

+                                      self.topdir + '/work/global/comps/comps-global.xml'])])

+         self.assertEqual(CompsWrapper.call_args_list,

+                          [mock.call(self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml')])

+         comps = CompsWrapper.return_value

+         self.assertEqual(comps.filter_groups.mock_calls, [mock.call(variant.groups)])

+         self.assertEqual(comps.filter_environments.mock_calls,

+                          [mock.call(variant.environments)])

+         self.assertEqual(comps.write_comps.mock_calls, [mock.call()])

+ 

+     @mock.patch('pungi.phases.init.run')

+     @mock.patch('pungi.phases.init.CompsWrapper')

+     def test_run_in_debug(self, CompsWrapper, run):

+         compose = DummyCompose(self.topdir, {})

+         compose.DEBUG = True

+         variant = compose.variants['Server']

+         touch(self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml')

+ 

+         init.write_variant_comps(compose, 'x86_64', variant)

+ 

+         self.assertEqual(run.mock_calls, [])

+         self.assertEqual(CompsWrapper.call_args_list,

+                          [mock.call(self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml')])

+         comps = CompsWrapper.return_value

+         self.assertEqual(comps.filter_groups.mock_calls, [mock.call(variant.groups)])

+         self.assertEqual(comps.filter_environments.mock_calls,

+                          [mock.call(variant.environments)])

+         self.assertEqual(comps.write_comps.mock_calls, [])

+ 

+ 

+ class TestCopyVariantComps(PungiTestCase):

+ 

+     @mock.patch('shutil.copy')

+     def test_run(self, copy):

+         compose = DummyCompose(self.topdir, {})

+         variant = compose.variants['Server']

+ 

+         init.copy_variant_comps(compose, 'x86_64', variant)

+ 

+         self.assertEqual(copy.mock_calls,

+                          [mock.call(self.topdir + '/work/global/comps/comps-global.xml',

+                                     self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml')])

+ 

+ 

+ if __name__ == "__main__":

+     unittest.main()

file modified
+111 -1
@@ -2,7 +2,10 @@

  # -*- coding: utf-8 -*-

  

  import mock

- import unittest

+ try:

+     import unittest2 as unittest

+ except ImportError:

+     import unittest

  import tempfile

  

  import os
@@ -398,5 +401,112 @@

          self.assertDictEqual(result, {'retcode': 1, 'output': output, 'task_id': 12345})

  

  

+ class RunBlockingCmdTest(KojiWrapperBaseTestCase):

+     @mock.patch('pungi.wrappers.kojiwrapper.run')

+     def test_minimal(self, run):

+         output = 'Created task: 1234\nHello\n'

+         run.return_value = (0, output)

+ 

+         result = self.koji.run_blocking_cmd('cmd')

+ 

+         self.assertDictEqual(result, {'retcode': 0, 'output': output, 'task_id': 1234})

+         self.assertItemsEqual(run.mock_calls,

+                               [mock.call('cmd', can_fail=True, logfile=None)])

+ 

+     @mock.patch('pungi.wrappers.kojiwrapper.run')

+     def test_with_log(self, run):

+         output = 'Created task: 1234\nHello\n'

+         run.return_value = (0, output)

+ 

+         result = self.koji.run_blocking_cmd('cmd', log_file='logfile')

+ 

+         self.assertDictEqual(result, {'retcode': 0, 'output': output, 'task_id': 1234})

+         self.assertItemsEqual(run.mock_calls,

+                               [mock.call('cmd', can_fail=True, logfile='logfile')])

+ 

+     @mock.patch('pungi.wrappers.kojiwrapper.run')

+     def test_fail_with_task_id(self, run):

+         output = 'Created task: 1234\nBoom\n'

+         run.return_value = (1, output)

+ 

+         result = self.koji.run_blocking_cmd('cmd')

+ 

+         self.assertDictEqual(result, {'retcode': 1, 'output': output, 'task_id': 1234})

+         self.assertItemsEqual(run.mock_calls,

+                               [mock.call('cmd', can_fail=True, logfile=None)])

+ 

+     @mock.patch('pungi.wrappers.kojiwrapper.run')

+     def test_fail_without_task_id(self, run):

+         output = 'Not found\n'

+         run.return_value = (1, output)

+ 

+         with self.assertRaises(RuntimeError) as ctx:

+             self.koji.run_blocking_cmd('cmd')

+ 

+         self.assertItemsEqual(run.mock_calls,

+                               [mock.call('cmd', can_fail=True, logfile=None)])

+         self.assertIn('Could not find task ID', str(ctx.exception))

+ 

+     @mock.patch('pungi.wrappers.kojiwrapper.run')

+     def test_disconnect_and_retry(self, run):

+         output = 'Created task: 1234\nerror: failed to connect\n'

+         retry = 'Created task: 1234\nOook\n'

+         run.side_effect = [(1, output), (0, retry)]

+ 

+         result = self.koji.run_blocking_cmd('cmd')

+ 

+         self.assertDictEqual(result, {'retcode': 0, 'output': retry, 'task_id': 1234})

+         self.assertEqual(run.mock_calls,

+                          [mock.call('cmd', can_fail=True, logfile=None),

+                           mock.call(['koji', 'watch-task', '1234'], can_fail=True, logfile=None)])

+ 

+     @mock.patch('pungi.wrappers.kojiwrapper.run')

+     def test_disconnect_and_retry_but_fail(self, run):

+         output = 'Created task: 1234\nerror: failed to connect\n'

+         retry = 'Created task: 1234\nNot working still\n'

+         run.side_effect = [(1, output), (1, retry)]

+ 

+         result = self.koji.run_blocking_cmd('cmd')

+ 

+         self.assertDictEqual(result, {'retcode': 1, 'output': retry, 'task_id': 1234})

+         self.assertEqual(run.mock_calls,

+                          [mock.call('cmd', can_fail=True, logfile=None),

+                           mock.call(['koji', 'watch-task', '1234'], can_fail=True, logfile=None)])

+ 

+     @mock.patch('time.sleep')

+     @mock.patch('pungi.wrappers.kojiwrapper.run')

+     def test_disconnect_and_retry_multiple_times(self, run, sleep):

+         output = 'Created task: 1234\nerror: failed to connect\n'

+         retry = 'Created task: 1234\nOK\n'

+         run.side_effect = [(1, output), (1, output), (1, output), (0, retry)]

+ 

+         result = self.koji.run_blocking_cmd('cmd')

+ 

+         self.assertDictEqual(result, {'retcode': 0, 'output': retry, 'task_id': 1234})

+         self.assertEqual(run.mock_calls,

+                          [mock.call('cmd', can_fail=True, logfile=None),

+                           mock.call(['koji', 'watch-task', '1234'], can_fail=True, logfile=None),

+                           mock.call(['koji', 'watch-task', '1234'], can_fail=True, logfile=None),

+                           mock.call(['koji', 'watch-task', '1234'], can_fail=True, logfile=None)])

+         self.assertEqual(sleep.mock_calls,

+                          [mock.call(i * 10) for i in range(1, 3)])

+ 

+     @mock.patch('time.sleep')

+     @mock.patch('pungi.wrappers.kojiwrapper.run')

+     def test_disconnect_and_never_reconnect(self, run, sleep):

+         output = 'Created task: 1234\nerror: failed to connect\n'

+         run.side_effect = [(1, output), (1, output), (1, output), (1, output)]

+ 

+         with self.assertRaises(RuntimeError) as ctx:

+             self.koji.run_blocking_cmd('cmd', max_retries=2)

+ 

+         self.assertIn('Failed to wait', str(ctx.exception))

+         self.assertEqual(run.mock_calls,

+                          [mock.call('cmd', can_fail=True, logfile=None),

+                           mock.call(['koji', 'watch-task', '1234'], can_fail=True, logfile=None),

+                           mock.call(['koji', 'watch-task', '1234'], can_fail=True, logfile=None)])

+         self.assertEqual(sleep.mock_calls, [mock.call(i * 10) for i in range(1, 2)])

+ 

+ 

  if __name__ == "__main__":

      unittest.main()

file modified
+132 -17
@@ -11,7 +11,7 @@

  sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

  

  from pungi.phases.live_images import LiveImagesPhase, CreateLiveImageThread

- from tests.helpers import DummyCompose, PungiTestCase

+ from tests.helpers import DummyCompose, PungiTestCase, boom

  

  

  class TestLiveImagesPhase(PungiTestCase):
@@ -54,7 +54,56 @@

                                             'specfile': None,

                                             'sign': False,

                                             'type': 'live',

-                                            'release': '20151203.0',

+                                            'release': '20151203.t.0',

+                                            'subvariant': 'Client',

+                                            'ksurl': None},

+                                           compose.variants['Client'],

+                                           'amd64'))])

+         self.assertItemsEqual(

+             compose.get_image_name.mock_calls,

+             [mock.call('amd64', compose.variants['Client'], disc_num=None, disc_type='live',

+                        format='%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s')])

+ 

+     @mock.patch('pungi.phases.live_images.ThreadPool')

+     def test_live_image_build_single_repo_from(self, ThreadPool):

+         compose = DummyCompose(self.topdir, {

+             'live_images': [

+                 ('^Client$', {

+                     'amd64': {

+                         'kickstart': 'test.ks',

+                         'additional_repos': ['http://example.com/repo/'],

+                         'repo_from': 'Everything',

+                         'release': None,

+                     }

+                 })

+             ],

+         })

+ 

+         phase = LiveImagesPhase(compose)

+ 

+         phase.run()

+ 

+         # assert at least one thread was started

+         self.assertTrue(phase.pool.add.called)

+         self.maxDiff = None

+         self.assertItemsEqual(phase.pool.queue_put.mock_calls,

+                               [mock.call((compose,

+                                           {'ks_file': 'test.ks',

+                                            'build_arch': 'amd64',

+                                            'dest_dir': self.topdir + '/compose/Client/amd64/iso',

+                                            'scratch': False,

+                                            'repos': [self.topdir + '/compose/Client/amd64/os',

+                                                      'http://example.com/repo/',

+                                                      self.topdir + '/compose/Everything/amd64/os'],

+                                            'label': '',

+                                            'name': None,

+                                            'filename': 'image-name',

+                                            'version': None,

+                                            'specfile': None,

+                                            'sign': False,

+                                            'type': 'live',

+                                            'release': '20151203.t.0',

+                                            'subvariant': 'Client',

                                             'ksurl': None},

                                            compose.variants['Client'],

                                            'amd64'))])
@@ -98,7 +147,8 @@

                                             'specfile': None,

                                             'sign': False,

                                             'type': 'live',

-                                            'release': '20151203.0',

+                                            'release': '20151203.t.0',

+                                            'subvariant': 'Client',

                                             'ksurl': None},

                                            compose.variants['Client'],

                                            'amd64'))])
@@ -145,6 +195,7 @@

                                             'sign': False,

                                             'type': 'live',

                                             'release': None,

+                                            'subvariant': 'Client',

                                             'ksurl': None},

                                            compose.variants['Client'],

                                            'amd64')),
@@ -164,6 +215,7 @@

                                             'sign': False,

                                             'type': 'live',

                                             'release': None,

+                                            'subvariant': 'Client',

                                             'ksurl': None},

                                            compose.variants['Client'],

                                            'amd64'))])
@@ -211,12 +263,62 @@

                                             'sign': False,

                                             'type': 'appliance',

                                             'release': None,

+                                            'subvariant': 'Client',

                                             'ksurl': 'https://git.example.com/kickstarts.git?#CAFEBABE'},

                                            compose.variants['Client'],

                                            'amd64'))])

          self.assertEqual(resolve_git_url.mock_calls,

                           [mock.call('https://git.example.com/kickstarts.git?#HEAD')])

  

+     @mock.patch('pungi.phases.live_images.ThreadPool')

+     def test_live_image_build_custom_type(self, ThreadPool):

+         compose = DummyCompose(self.topdir, {

+             'disc_types': {'live': 'Live'},

+             'live_images': [

+                 ('^Client$', {

+                     'amd64': {

+                         'kickstart': 'test.ks',

+                         'additional_repos': ['http://example.com/repo/'],

+                         'repo_from': ['Everything'],

+                         'release': None,

+                     }

+                 })

+             ],

+         })

+ 

+         phase = LiveImagesPhase(compose)

+ 

+         phase.run()

+ 

+         # assert at least one thread was started

+         self.assertTrue(phase.pool.add.called)

+         self.maxDiff = None

+         self.assertItemsEqual(phase.pool.queue_put.mock_calls,

+                               [mock.call((compose,

+                                           {'ks_file': 'test.ks',

+                                            'build_arch': 'amd64',

+                                            'dest_dir': self.topdir + '/compose/Client/amd64/iso',

+                                            'scratch': False,

+                                            'repos': [self.topdir + '/compose/Client/amd64/os',

+                                                      'http://example.com/repo/',

+                                                      self.topdir + '/compose/Everything/amd64/os'],

+                                            'label': '',

+                                            'name': None,

+                                            'filename': 'image-name',

+                                            'version': None,

+                                            'specfile': None,

+                                            'sign': False,

+                                            'type': 'live',

+                                            'release': '20151203.t.0',

+                                            'subvariant': 'Client',

+                                            'ksurl': None},

+                                           compose.variants['Client'],

+                                           'amd64'))])

+         self.assertItemsEqual(

+             compose.get_image_name.mock_calls,

+             [mock.call('amd64', compose.variants['Client'], disc_num=None, disc_type='Live',

+                        format='%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s')])

+ 

  

  class TestCreateLiveImageThread(PungiTestCase):

  
@@ -243,6 +345,7 @@

              'type': 'live',

              'ksurl': 'https://git.example.com/kickstarts.git?#CAFEBABE',

              'release': None,

+             'subvariant': 'Something',

          }

  

          koji_wrapper = KojiWrapper.return_value
@@ -264,7 +367,7 @@

  

          self.assertEqual(koji_wrapper.run_blocking_cmd.mock_calls,

                           [mock.call('koji spin-livecd ...',

-                                     log_file=self.topdir + '/logs/amd64/createiso-None-None-None.amd64.log')])

+                                     log_file=self.topdir + '/logs/amd64/liveimage-None-None-None.amd64.log')])

          self.assertEqual(koji_wrapper.get_image_path.mock_calls, [mock.call(123)])

          self.assertEqual(copy2.mock_calls,

                           [mock.call('/path/to/image.iso', self.topdir + '/compose/Client/amd64/iso/image-name')])
@@ -275,7 +378,7 @@

          ])

          self.assertEqual(run.mock_calls, [mock.call(write_manifest_cmd)])

          self.assertEqual(koji_wrapper.get_create_image_cmd.mock_calls,

-                          [mock.call('Test', '20151203.0.t', 'rhel-7.0-candidate',

+                          [mock.call('test-Something-Live-amd64', '20151203.0.t', 'rhel-7.0-candidate',

                                      'amd64', '/path/to/ks_file',

                                      ['/repo/amd64/Client',

                                       'http://example.com/repo/',
@@ -321,6 +424,7 @@

              'type': 'live',

              'ksurl': 'https://git.example.com/kickstarts.git?#CAFEBABE',

              'release': None,

+             'subvariant': 'Client',

          }

  

          koji_wrapper = KojiWrapper.return_value
@@ -342,7 +446,7 @@

  

          self.assertEqual(koji_wrapper.run_blocking_cmd.mock_calls,

                           [mock.call('koji spin-livecd ...',

-                                     log_file=self.topdir + '/logs/amd64/createiso-None-None-None.amd64.log')])

+                                     log_file=self.topdir + '/logs/amd64/liveimage-None-None-None.amd64.log')])

          self.assertEqual(koji_wrapper.get_image_path.mock_calls, [mock.call(123)])

          self.assertEqual(copy2.mock_calls,

                           [mock.call('/path/to/image.iso', self.topdir + '/compose/Client/amd64/iso/image.iso')])
@@ -353,7 +457,7 @@

          ])

          self.assertEqual(run.mock_calls, [mock.call(write_manifest_cmd)])

          self.assertEqual(koji_wrapper.get_create_image_cmd.mock_calls,

-                          [mock.call('Test', '20151203.0.t', 'rhel-7.0-candidate',

+                          [mock.call('test-Client-Live-amd64', '20151203.0.t', 'rhel-7.0-candidate',

                                      'amd64', '/path/to/ks_file',

                                      ['/repo/amd64/Client',

                                       'http://example.com/repo/',
@@ -400,6 +504,7 @@

              'type': 'appliance',

              'ksurl': None,

              'release': None,

+             'subvariant': 'Client',

          }

  

          koji_wrapper = KojiWrapper.return_value
@@ -421,18 +526,14 @@

  

          self.assertEqual(koji_wrapper.run_blocking_cmd.mock_calls,

                           [mock.call('koji spin-livecd ...',

-                                     log_file=self.topdir + '/logs/amd64/createiso-None-None-None.amd64.log')])

+                                     log_file=self.topdir + '/logs/amd64/liveimage-None-None-None.amd64.log')])

          self.assertEqual(koji_wrapper.get_image_path.mock_calls, [mock.call(123)])

          self.assertEqual(copy2.mock_calls,

                           [mock.call('/path/to/image-a.b-sda.raw.xz', self.topdir + '/compose/Client/amd64/iso/image-name')])

  

-         write_manifest_cmd = ' && '.join([

-             'cd ' + self.topdir + '/compose/Client/amd64/iso',

-             'isoinfo -R -f -i image-name | grep -v \'/TRANS.TBL$\' | sort >> image-name.manifest'

-         ])

-         self.assertEqual(run.mock_calls, [mock.call(write_manifest_cmd)])

+         self.assertEqual(run.mock_calls, [])

          self.assertEqual(koji_wrapper.get_create_image_cmd.mock_calls,

-                          [mock.call('Test', '20151203.0.t', 'rhel-7.0-candidate',

+                          [mock.call('test-Client-Disk-amd64', '20151203.0.t', 'rhel-7.0-candidate',

                                      'amd64', '/path/to/ks_file',

                                      ['/repo/amd64/Client',

                                       'http://example.com/repo/',
@@ -479,6 +580,9 @@

              'version': None,

              'specfile': None,

              'ksurl': None,

+             'subvariant': 'Client',

+             'release': 'xyz',

+             'type': 'live',

          }

  

          koji_wrapper = KojiWrapper.return_value
@@ -493,6 +597,12 @@

          with mock.patch('time.sleep'):

              t.process((compose, cmd, compose.variants['Client'], 'amd64'), 1)

  

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Creating live images (variant Client, arch amd64) failed, but going on anyway.'),

+             mock.call('LiveImage task failed: 123. See %s/logs/amd64/liveimage-None-None-xyz.amd64.log for more details.'

+                       % self.topdir)

+         ])

+ 

      @mock.patch('shutil.copy2')

      @mock.patch('pungi.phases.live_images.run')

      @mock.patch('pungi.phases.live_images.KojiWrapper')
@@ -516,11 +626,11 @@

              'version': None,

              'specfile': None,

              'ksurl': None,

+             'subvariant': 'Client',

+             'release': 'xyz',

+             'type': 'live',

          }

  

-         def boom(*args, **kwargs):

-             raise RuntimeError('BOOM')

- 

          koji_wrapper = KojiWrapper.return_value

          koji_wrapper.get_create_image_cmd.side_effect = boom

  
@@ -528,6 +638,11 @@

          with mock.patch('time.sleep'):

              t.process((compose, cmd, compose.variants['Client'], 'amd64'), 1)

  

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Creating live images (variant Client, arch amd64) failed, but going on anyway.'),

+             mock.call('BOOM')

+         ])

+ 

  

  if __name__ == "__main__":

      unittest.main()

file modified
+152 -21
@@ -10,10 +10,30 @@

  sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

  

  from pungi.phases.livemedia_phase import LiveMediaPhase, LiveMediaThread

- from tests.helpers import DummyCompose, PungiTestCase

+ from tests.helpers import DummyCompose, PungiTestCase, boom

  

  

  class TestLiveMediaPhase(PungiTestCase):

+ 

+     def test_global_config_validation(self):

+         compose = DummyCompose(self.topdir, {

+             'live_media_ksurl': 'git://example.com/repo.git#HEAD',

+             'live_media_target': 'f24',

+             'live_media_release': 'RRR',

+             'live_media_version': 'Rawhide',

+         })

+ 

+         phase = LiveMediaPhase(compose)

+         phase.validate()

+ 

+     def test_global_config_null_release(self):

+         compose = DummyCompose(self.topdir, {

+             'live_media_release': None,

+         })

+ 

+         phase = LiveMediaPhase(compose)

+         phase.validate()

+ 

      @mock.patch('pungi.phases.livemedia_phase.ThreadPool')

      def test_live_media_minimal(self, ThreadPool):

          compose = DummyCompose(self.topdir, {
@@ -52,6 +72,103 @@

                                           'title': None,

                                           'install_tree': self.topdir + '/compose/Server/$basearch/os',

                                           'version': 'Rawhide',

+                                          'subvariant': 'Server',

+                                      }))])

+ 

+     @mock.patch('pungi.phases.livemedia_phase.resolve_git_url')

+     @mock.patch('pungi.phases.livemedia_phase.ThreadPool')

+     def test_live_media_with_global_opts(self, ThreadPool, resolve_git_url):

+         compose = DummyCompose(self.topdir, {

+             'live_media_ksurl': 'git://example.com/repo.git#HEAD',

+             'live_media_target': 'f24',

+             'live_media_release': 'RRR',

+             'live_media_version': 'Rawhide',

+             'live_media': {

+                 '^Server$': [

+                     {

+                         'kickstart': 'file.ks',

+                         'name': 'Fedora Server Live',

+                     },

+                     {

+                         'kickstart': 'different.ks',

+                         'name': 'Fedora Server Live',

+                     },

+                     {

+                         'kickstart': 'yet-another.ks',

+                         'name': 'Fedora Server Live',

+                         'ksurl': 'git://different.com/repo.git',

+                         'target': 'f25',

+                         'release': 'XXX',

+                         'version': '25',

+                     }

+                 ]

+             },

+             'koji_profile': 'koji',

+         })

+ 

+         resolve_git_url.return_value = 'git://example.com/repo.git#BEEFCAFE'

+ 

+         phase = LiveMediaPhase(compose)

+ 

+         phase.run()

+         self.assertTrue(phase.pool.add.called)

+         self.assertItemsEqual(resolve_git_url.mock_calls,

+                               [mock.call('git://example.com/repo.git#HEAD'),

+                                mock.call('git://different.com/repo.git')])

+         self.assertEqual(phase.pool.queue_put.call_args_list,

+                          [mock.call((compose,

+                                      compose.variants['Server'],

+                                      {

+                                          'arches': ['amd64', 'x86_64'],

+                                          'ksfile': 'file.ks',

+                                          'ksurl': 'git://example.com/repo.git#BEEFCAFE',

+                                          'ksversion': None,

+                                          'name': 'Fedora Server Live',

+                                          'release': 'RRR',

+                                          'repo': [self.topdir + '/compose/Server/$basearch/os'],

+                                          'scratch': False,

+                                          'skip_tag': None,

+                                          'target': 'f24',

+                                          'title': None,

+                                          'install_tree': self.topdir + '/compose/Server/$basearch/os',

+                                          'version': 'Rawhide',

+                                          'subvariant': 'Server',

+                                      })),

+                           mock.call((compose,

+                                      compose.variants['Server'],

+                                      {

+                                          'arches': ['amd64', 'x86_64'],

+                                          'ksfile': 'different.ks',

+                                          'ksurl': 'git://example.com/repo.git#BEEFCAFE',

+                                          'ksversion': None,

+                                          'name': 'Fedora Server Live',

+                                          'release': 'RRR',

+                                          'repo': [self.topdir + '/compose/Server/$basearch/os'],

+                                          'scratch': False,

+                                          'skip_tag': None,

+                                          'target': 'f24',

+                                          'title': None,

+                                          'install_tree': self.topdir + '/compose/Server/$basearch/os',

+                                          'version': 'Rawhide',

+                                          'subvariant': 'Server',

+                                      })),

+                           mock.call((compose,

+                                      compose.variants['Server'],

+                                      {

+                                          'arches': ['amd64', 'x86_64'],

+                                          'ksfile': 'yet-another.ks',

+                                          'ksurl': 'git://example.com/repo.git#BEEFCAFE',

+                                          'ksversion': None,

+                                          'name': 'Fedora Server Live',

+                                          'release': 'XXX',

+                                          'repo': [self.topdir + '/compose/Server/$basearch/os'],

+                                          'scratch': False,

+                                          'skip_tag': None,

+                                          'target': 'f25',

+                                          'title': None,

+                                          'install_tree': self.topdir + '/compose/Server/$basearch/os',

+                                          'version': '25',

+                                          'subvariant': 'Server',

                                       }))])

  

      @mock.patch('pungi.phases.livemedia_phase.ThreadPool')
@@ -121,6 +238,7 @@

                          'release': None,

                          'version': 'Rawhide',

                          'install_tree_from': 'Everything',

+                         'subvariant': 'Something',

                      }

                  ]

              }
@@ -141,7 +259,7 @@

                                           'ksurl': 'resolved',

                                           'ksversion': '24',

                                           'name': 'Fedora Server Live',

-                                          'release': '20151203.0',

+                                          'release': '20151203.t.0',

                                           'repo': ['http://example.com/extra_repo',

                                                    self.topdir + '/compose/Everything/$basearch/os',

                                                    self.topdir + '/compose/Server/$basearch/os'],
@@ -151,10 +269,11 @@

                                           'title': 'Custom Title',

                                           'install_tree': self.topdir + '/compose/Everything/$basearch/os',

                                           'version': 'Rawhide',

+                                          'subvariant': 'Something',

                                       }))])

  

  

- class TestCreateImageBuildThread(PungiTestCase):

+ class TestLiveMediaThread(PungiTestCase):

  

      @mock.patch('pungi.phases.livemedia_phase.get_mtime')

      @mock.patch('pungi.phases.livemedia_phase.get_file_size')
@@ -177,6 +296,7 @@

              'target': 'f24',

              'title': None,

              'version': 'Rawhide',

+             'subvariant': 'KDE',

          }

          pool = mock.Mock()

  
@@ -213,7 +333,7 @@

          self.assertEqual(

              run_blocking_cmd.mock_calls,

              [mock.call('koji-spin-livemedia',

-                        log_file=self.topdir + '/logs/amd64-x86_64/livemedia-Server.amd64-x86_64.log')])

+                        log_file=self.topdir + '/logs/amd64-x86_64/livemedia-Server-KDE.amd64-x86_64.log')])

          self.assertEqual(get_live_media_cmd.mock_calls,

                           [mock.call({'arch': 'amd64,x86_64',

                                       'ksfile': 'file.ks',
@@ -255,9 +375,12 @@

              self.assertIn(image.path, image_relative_paths)

              self.assertEqual('iso', image.format)

              self.assertEqual('live', image.type)

+             self.assertEqual('KDE', image.subvariant)

  

+     @mock.patch('pungi.phases.livemedia_phase.get_mtime')

+     @mock.patch('pungi.phases.livemedia_phase.get_file_size')

      @mock.patch('pungi.phases.livemedia_phase.KojiWrapper')

-     def test_handle_koji_fail(self, KojiWrapper):

+     def test_handle_koji_fail(self, KojiWrapper, get_file_size, get_mtime):

          compose = DummyCompose(self.topdir, {

              'koji_profile': 'koji',

              'failable_deliverables': [
@@ -277,6 +400,7 @@

              'target': 'f24',

              'title': None,

              'version': 'Rawhide',

+             'subvariant': 'KDE',

          }

          pool = mock.Mock()

  
@@ -286,17 +410,23 @@

              'retcode': 1,

              'output': None,

          }

+         get_file_size.return_value = 1024

+         get_mtime.return_value.st_mtime = 13579

  

          t = LiveMediaThread(pool)

-         with mock.patch('os.stat') as stat:

-             with mock.patch('os.path.getsize') as getsize:

-                 with mock.patch('time.sleep'):

-                     getsize.return_value = 1024

-                     stat.return_value.st_mtime = 13579

-                     t.process((compose, compose.variants['Server'], config), 1)

+         with mock.patch('time.sleep'):

+             t.process((compose, compose.variants['Server'], config), 1)

+ 

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Live-media (variant Server, arch *) failed, but going on anyway.'),

+             mock.call('Live media task failed: 1234. See %s for more details.'

+                       % (os.path.join(self.topdir, 'logs/amd64-x86_64/livemedia-Server-KDE.amd64-x86_64.log')))

+         ])

  

+     @mock.patch('pungi.phases.livemedia_phase.get_mtime')

+     @mock.patch('pungi.phases.livemedia_phase.get_file_size')

      @mock.patch('pungi.phases.livemedia_phase.KojiWrapper')

-     def test_handle_exception(self, KojiWrapper):

+     def test_handle_exception(self, KojiWrapper, get_file_size, get_mtime):

          compose = DummyCompose(self.topdir, {

              'koji_profile': 'koji',

              'failable_deliverables': [
@@ -316,22 +446,23 @@

              'target': 'f24',

              'title': None,

              'version': 'Rawhide',

+             'subvariant': 'KDE',

          }

          pool = mock.Mock()

  

-         def boom(*args, **kwargs):

-             raise Exception('BOOM')

- 

          run_blocking_cmd = KojiWrapper.return_value.run_blocking_cmd

          run_blocking_cmd.side_effect = boom

+         get_file_size.return_value = 1024

+         get_mtime.return_value.st_mtime = 13579

  

          t = LiveMediaThread(pool)

-         with mock.patch('os.stat') as stat:

-             with mock.patch('os.path.getsize') as getsize:

-                 with mock.patch('time.sleep'):

-                     getsize.return_value = 1024

-                     stat.return_value.st_mtime = 13579

-                     t.process((compose, compose.variants['Server'], config), 1)

+         with mock.patch('time.sleep'):

+             t.process((compose, compose.variants['Server'], config), 1)

+ 

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Live-media (variant Server, arch *) failed, but going on anyway.'),

+             mock.call('BOOM')

+         ])

  

  

  if __name__ == "__main__":

file modified
+13 -2
@@ -1,7 +1,10 @@

  #!/usr/bin/env python2

  # -*- coding: utf-8 -*-

  

- import unittest

+ try:

+     import unittest2 as unittest

+ except ImportError:

+     import unittest

  

  import os

  import sys
@@ -34,7 +37,11 @@

                                         variant="Server", bugurl="http://example.com/",

                                         nomacboot=True, noupgrade=True, is_final=True,

                                         buildarch='x86_64', volid='VOLUME_ID',

-                                        buildinstallpackages=['bash', 'vim'])

+                                        buildinstallpackages=['bash', 'vim'],

+                                        add_template=['t1', 't2'],

+                                        add_arch_template=['ta1', 'ta2'],

+                                        add_template_var=['v1', 'v2'],

+                                        add_arch_template_var=['va1', 'va2'])

  

          self.assertEqual(cmd[0], 'lorax')

          self.assertItemsEqual(cmd[1:],
@@ -45,6 +52,10 @@

                                 '--buildarch=x86_64', '--volid=VOLUME_ID',

                                 '--nomacboot', '--noupgrade', '--isfinal',

                                 '--installpkgs=bash', '--installpkgs=vim',

+                                '--add-template=t1', '--add-template=t2',

+                                '--add-arch-template=ta1', '--add-arch-template=ta2',

+                                '--add-template-var=v1', '--add-template-var=v2',

+                                '--add-arch-template-var=va1', '--add-arch-template-var=va2',

                                 '/mnt/output_dir'])

  

  

@@ -0,0 +1,496 @@

+ #!/usr/bin/env python

+ # -*- coding: utf-8 -*-

+ 

+ 

+ import unittest

+ import mock

+ 

+ import os

+ import sys

+ 

+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

+ 

+ from tests import helpers

+ from pungi.phases import ostree_installer as ostree

+ 

+ 

+ class OstreeInstallerPhaseTest(helpers.PungiTestCase):

+ 

+     def test_validate(self):

+         compose = helpers.DummyCompose(self.topdir, {

+             'ostree_installer': [

+                 ("^Atomic$", {

+                     "x86_64": {

+                         "source_repo_from": "Everything",

+                         "release": None,

+                         "installpkgs": ["fedora-productimg-atomic"],

+                         "add_template": ["/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"],

+                         "add_template_var": [

+                             "ostree_osname=fedora-atomic",

+                             "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",

+                         ],

+                         "add_arch_template": ["/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"],

+                         "add_arch_template_var": [

+                             "ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",

+                             "ostree_osname=fedora-atomic",

+                             "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",

+                         ]

+                     }

+                 })

+             ]

+         })

+ 

+         phase = ostree.OstreeInstallerPhase(compose)

+         try:

+             phase.validate()

+         except:

+             self.fail('Correct config must validate')

+ 

+     def test_validate_bad_conf(self):

+         compose = helpers.DummyCompose(self.topdir, {

+             'ostree_installer': 'yes please'

+         })

+ 

+         phase = ostree.OstreeInstallerPhase(compose)

+         with self.assertRaises(ValueError):

+             phase.validate()

+ 

+     @mock.patch('pungi.phases.ostree_installer.ThreadPool')

+     def test_run(self, ThreadPool):

+         cfg = mock.Mock()

+         compose = helpers.DummyCompose(self.topdir, {

+             'ostree_installer': [

+                 ('^Everything$', {'x86_64': cfg})

+             ]

+         })

+ 

+         pool = ThreadPool.return_value

+ 

+         phase = ostree.OstreeInstallerPhase(compose)

+         phase.run()

+ 

+         self.assertEqual(len(pool.add.call_args_list), 1)

+         self.assertEqual(pool.queue_put.call_args_list,

+                          [mock.call((compose, compose.variants['Everything'], 'x86_64', cfg))])

+ 

+     @mock.patch('pungi.phases.ostree_installer.ThreadPool')

+     def test_skip_without_config(self, ThreadPool):

+         compose = helpers.DummyCompose(self.topdir, {})

+         compose.just_phases = None

+         compose.skip_phases = []

+         phase = ostree.OstreeInstallerPhase(compose)

+         self.assertTrue(phase.skip())

+ 

+ 

+ class OstreeThreadTest(helpers.PungiTestCase):

+ 

+     def assertImageAdded(self, compose, ImageCls, IsoWrapper):

+         image = ImageCls.return_value

+         self.assertEqual(image.path, 'Everything/x86_64/iso/image-name')

+         self.assertEqual(image.mtime, 13579)

+         self.assertEqual(image.size, 1024)

+         self.assertEqual(image.arch, 'x86_64')

+         self.assertEqual(image.type, "boot")

+         self.assertEqual(image.format, "iso")

+         self.assertEqual(image.disc_number, 1)

+         self.assertEqual(image.disc_count, 1)

+         self.assertEqual(image.bootable, True)

+         self.assertEqual(image.implant_md5, IsoWrapper.return_value.get_implanted_md5.return_value)

+         self.assertEqual(compose.im.add.mock_calls,

+                          [mock.call('Everything', 'x86_64', image)])

+ 

+     @mock.patch('kobo.shortcuts.run')

+     @mock.patch('productmd.images.Image')

+     @mock.patch('pungi.util.get_mtime')

+     @mock.patch('pungi.util.get_file_size')

+     @mock.patch('pungi.wrappers.iso.IsoWrapper')

+     @mock.patch('os.link')

+     @mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')

+     def test_run(self, KojiWrapper, link, IsoWrapper,

+                  get_file_size, get_mtime, ImageCls, run):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_name': 'Fedora',

+             'release_version': 'Rawhide',

+             'koji_profile': 'koji',

+             'runroot_tag': 'rrt',

+         })

+         pool = mock.Mock()

+         cfg = {

+             'source_repo_from': 'Everything',

+             'release': '20160321.n.0',

+         }

+         koji = KojiWrapper.return_value

+         koji.run_runroot_cmd.return_value = {

+             'task_id': 1234,

+             'retcode': 0,

+             'output': 'Foo bar\n',

+         }

+         get_file_size.return_value = 1024

+         get_mtime.return_value = 13579

+         final_iso_path = self.topdir + '/compose/Everything/x86_64/iso/image-name'

+ 

+         t = ostree.OstreeInstallerThread(pool)

+ 

+         t.process((compose, compose.variants['Everything'], 'x86_64', cfg), 1)

+ 

+         self.assertEqual(koji.get_runroot_cmd.call_args_list,

+                          [mock.call('rrt', 'x86_64',

+                                     ['lorax',

+                                      '--product=Fedora',

+                                      '--version=Rawhide',

+                                      '--release=20160321.n.0',

+                                      '--source=file://%s/compose/Everything/x86_64/os' % self.topdir,

+                                      '--variant=Everything',

+                                      '--nomacboot',

+                                      self.topdir + '/work/x86_64/Everything/ostree_installer'],

+                                     channel=None, mounts=[self.topdir],

+                                     packages=['pungi', 'lorax', 'ostree'],

+                                     task_id=True, use_shell=True)])

+         self.assertEqual(koji.run_runroot_cmd.call_args_list,

+                          [mock.call(koji.get_runroot_cmd.return_value,

+                                     log_file=self.topdir + '/logs/x86_64/ostree_installer/runroot.log')])

+         self.assertEqual(link.call_args_list,

+                          [mock.call(self.topdir + '/work/x86_64/Everything/ostree_installer/images/boot.iso',

+                                     final_iso_path)])

+         self.assertEqual(get_file_size.call_args_list, [mock.call(final_iso_path)])

+         self.assertEqual(get_mtime.call_args_list, [mock.call(final_iso_path)])

+         self.assertImageAdded(compose, ImageCls, IsoWrapper)

+         self.assertEqual(compose.get_image_name.call_args_list,

+                          [mock.call('x86_64', compose.variants['Everything'], disc_type='dvd')])

+         self.assertTrue(os.path.isdir(self.topdir + '/work/x86_64/Everything/'))

+         self.assertFalse(os.path.isdir(self.topdir + '/work/x86_64/Everything/ostree_installer'))

+         self.assertEqual(run.call_args_list,

+                          [mock.call('cp -av {0}/work/x86_64/Everything/ostree_installer/* {0}/compose/Everything/x86_64/iso/'.format(self.topdir))])

+ 

+     @mock.patch('kobo.shortcuts.run')

+     @mock.patch('productmd.images.Image')

+     @mock.patch('pungi.util.get_mtime')

+     @mock.patch('pungi.util.get_file_size')

+     @mock.patch('pungi.wrappers.iso.IsoWrapper')

+     @mock.patch('os.link')

+     @mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')

+     def test_run_external_source(self, KojiWrapper, link, IsoWrapper,

+                                  get_file_size, get_mtime, ImageCls, run):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_name': 'Fedora',

+             'release_version': 'Rawhide',

+             'koji_profile': 'koji',

+             'runroot_tag': 'rrt',

+         })

+         pool = mock.Mock()

+         cfg = {

+             'source_repo_from': 'http://example.com/repo/$arch/',

+             'release': '20160321.n.0',

+         }

+         koji = KojiWrapper.return_value

+         koji.run_runroot_cmd.return_value = {

+             'task_id': 1234,

+             'retcode': 0,

+             'output': 'Foo bar\n',

+         }

+         get_file_size.return_value = 1024

+         get_mtime.return_value = 13579

+         final_iso_path = self.topdir + '/compose/Everything/x86_64/iso/image-name'

+ 

+         t = ostree.OstreeInstallerThread(pool)

+ 

+         t.process((compose, compose.variants['Everything'], 'x86_64', cfg), 1)

+ 

+         self.assertEqual(koji.get_runroot_cmd.call_args_list,

+                          [mock.call('rrt', 'x86_64',

+                                     ['lorax',

+                                      '--product=Fedora',

+                                      '--version=Rawhide',

+                                      '--release=20160321.n.0',

+                                      '--source=http://example.com/repo/x86_64/',

+                                      '--variant=Everything',

+                                      '--nomacboot',

+                                      self.topdir + '/work/x86_64/Everything/ostree_installer'],

+                                     channel=None, mounts=[self.topdir],

+                                     packages=['pungi', 'lorax', 'ostree'],

+                                     task_id=True, use_shell=True)])

+         self.assertEqual(koji.run_runroot_cmd.call_args_list,

+                          [mock.call(koji.get_runroot_cmd.return_value,

+                                     log_file=self.topdir + '/logs/x86_64/ostree_installer/runroot.log')])

+         self.assertEqual(link.call_args_list,

+                          [mock.call(self.topdir + '/work/x86_64/Everything/ostree_installer/images/boot.iso',

+                                     final_iso_path)])

+         self.assertEqual(get_file_size.call_args_list, [mock.call(final_iso_path)])

+         self.assertEqual(get_mtime.call_args_list, [mock.call(final_iso_path)])

+         self.assertImageAdded(compose, ImageCls, IsoWrapper)

+         self.assertEqual(compose.get_image_name.call_args_list,

+                          [mock.call('x86_64', compose.variants['Everything'], disc_type='dvd')])

+         self.assertTrue(os.path.isdir(self.topdir + '/work/x86_64/Everything/'))

+         self.assertFalse(os.path.isdir(self.topdir + '/work/x86_64/Everything/ostree_installer'))

+         self.assertEqual(run.call_args_list,

+                          [mock.call('cp -av {0}/work/x86_64/Everything/ostree_installer/* {0}/compose/Everything/x86_64/iso/'.format(self.topdir))])

+ 

+     @mock.patch('kobo.shortcuts.run')

+     @mock.patch('productmd.images.Image')

+     @mock.patch('pungi.util.get_mtime')

+     @mock.patch('pungi.util.get_file_size')

+     @mock.patch('pungi.wrappers.iso.IsoWrapper')

+     @mock.patch('os.link')

+     @mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')

+     def test_fail_with_relative_template_path_but_no_repo(self, KojiWrapper, link,

+                                                           IsoWrapper, get_file_size,

+                                                           get_mtime, ImageCls, run):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_name': 'Fedora',

+             'release_version': 'Rawhide',

+             'koji_profile': 'koji',

+             'runroot_tag': 'rrt',

+         })

+         pool = mock.Mock()

+         cfg = {

+             'source_repo_from': 'Everything',

+             'release': '20160321.n.0',

+             'add_template': ['some-file.txt'],

+         }

+         koji = KojiWrapper.return_value

+         koji.run_runroot_cmd.return_value = {

+             'task_id': 1234,

+             'retcode': 0,

+             'output': 'Foo bar\n',

+         }

+         get_file_size.return_value = 1024

+         get_mtime.return_value = 13579

+ 

+         t = ostree.OstreeInstallerThread(pool)

+ 

+         with self.assertRaises(RuntimeError) as ctx:

+             t.process((compose, compose.variants['Everything'], 'x86_64', cfg), 1)

+ 

+         self.assertIn('template_repo', str(ctx.exception))

+ 

+     @mock.patch('pungi.wrappers.scm.get_dir_from_scm')

+     @mock.patch('kobo.shortcuts.run')

+     @mock.patch('productmd.images.Image')

+     @mock.patch('pungi.util.get_mtime')

+     @mock.patch('pungi.util.get_file_size')

+     @mock.patch('pungi.wrappers.iso.IsoWrapper')

+     @mock.patch('os.link')

+     @mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')

+     def test_run_clone_templates(self, KojiWrapper, link, IsoWrapper,

+                                  get_file_size, get_mtime, ImageCls, run,

+                                  get_dir_from_scm):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_name': 'Fedora',

+             'release_version': 'Rawhide',

+             'koji_profile': 'koji',

+             'runroot_tag': 'rrt',

+         })

+         pool = mock.Mock()

+         cfg = {

+             'source_repo_from': 'Everything',

+             'release': '20160321.n.0',

+             'add_template': ['some_file.txt'],

+             'add_arch_template': ['other_file.txt'],

+             'template_repo': 'git://example.com/templates.git',

+             'template_branch': 'f24',

+         }

+         koji = KojiWrapper.return_value

+         koji.run_runroot_cmd.return_value = {

+             'task_id': 1234,

+             'retcode': 0,

+             'output': 'Foo bar\n',

+         }

+         get_file_size.return_value = 1024

+         get_mtime.return_value = 13579

+         final_iso_path = self.topdir + '/compose/Everything/x86_64/iso/image-name'

+         templ_dir = self.topdir + '/work/x86_64/Everything/lorax_templates'

+ 

+         t = ostree.OstreeInstallerThread(pool)

+ 

+         t.process((compose, compose.variants['Everything'], 'x86_64', cfg), 1)

+ 

+         self.assertEqual(get_dir_from_scm.call_args_list,

+                          [mock.call({'scm': 'git', 'repo': 'git://example.com/templates.git',

+                                      'branch': 'f24', 'dir': '.'},

+                                     templ_dir, logger=pool._logger)])

+         self.assertEqual(koji.get_runroot_cmd.call_args_list,

+                          [mock.call('rrt', 'x86_64',

+                                     ['lorax',

+                                      '--product=Fedora',

+                                      '--version=Rawhide',

+                                      '--release=20160321.n.0',

+                                      '--source=file://%s/compose/Everything/x86_64/os' % self.topdir,

+                                      '--variant=Everything',

+                                      '--nomacboot',

+                                      '--add-template=%s/some_file.txt' % templ_dir,

+                                      '--add-arch-template=%s/other_file.txt' % templ_dir,

+                                      self.topdir + '/work/x86_64/Everything/ostree_installer'],

+                                     channel=None, mounts=[self.topdir],

+                                     packages=['pungi', 'lorax', 'ostree'],

+                                     task_id=True, use_shell=True)])

+         self.assertEqual(koji.run_runroot_cmd.call_args_list,

+                          [mock.call(koji.get_runroot_cmd.return_value,

+                                     log_file=self.topdir + '/logs/x86_64/ostree_installer/runroot.log')])

+         self.assertEqual(link.call_args_list,

+                          [mock.call(self.topdir + '/work/x86_64/Everything/ostree_installer/images/boot.iso',

+                                     final_iso_path)])

+         self.assertEqual(get_file_size.call_args_list, [mock.call(final_iso_path)])

+         self.assertEqual(get_mtime.call_args_list, [mock.call(final_iso_path)])

+         self.assertImageAdded(compose, ImageCls, IsoWrapper)

+         self.assertEqual(compose.get_image_name.call_args_list,

+                          [mock.call('x86_64', compose.variants['Everything'], disc_type='dvd')])

+         self.assertTrue(os.path.isdir(self.topdir + '/work/x86_64/Everything/'))

+         self.assertFalse(os.path.isdir(self.topdir + '/work/x86_64/Everything/ostree_installer'))

+         self.assertEqual(run.call_args_list,

+                          [mock.call('cp -av {0}/work/x86_64/Everything/ostree_installer/* {0}/compose/Everything/x86_64/iso/'.format(self.topdir))])

+ 

+     @mock.patch('kobo.shortcuts.run')

+     @mock.patch('productmd.images.Image')

+     @mock.patch('pungi.util.get_mtime')

+     @mock.patch('pungi.util.get_file_size')

+     @mock.patch('pungi.wrappers.iso.IsoWrapper')

+     @mock.patch('os.link')

+     @mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')

+     def test_run_with_implicit_release(self, KojiWrapper, link, IsoWrapper,

+                                        get_file_size, get_mtime, ImageCls, run):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_name': 'Fedora',

+             'release_version': 'Rawhide',

+             'koji_profile': 'koji',

+             'runroot_tag': 'rrt',

+         })

+         pool = mock.Mock()

+         cfg = {

+             'source_repo_from': 'Everything',

+             'release': None,

+             "installpkgs": ["fedora-productimg-atomic"],

+             "add_template": ["/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl"],

+             "add_template_var": [

+                 "ostree_osname=fedora-atomic",

+                 "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",

+             ],

+             "add_arch_template": ["/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl"],

+             "add_arch_template_var": [

+                 "ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/",

+                 "ostree_osname=fedora-atomic",

+                 "ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host",

+             ],

+         }

+         koji = KojiWrapper.return_value

+         koji.run_runroot_cmd.return_value = {

+             'task_id': 1234,

+             'retcode': 0,

+             'output': 'Foo bar\n',

+         }

+         get_file_size.return_value = 1024

+         get_mtime.return_value = 13579

+         final_iso_path = self.topdir + '/compose/Everything/x86_64/iso/image-name'

+ 

+         t = ostree.OstreeInstallerThread(pool)

+ 

+         t.process((compose, compose.variants['Everything'], 'x86_64', cfg), 1)

+ 

+         self.assertEqual(

+             koji.get_runroot_cmd.call_args_list,

+             [mock.call('rrt', 'x86_64',

+                        ['lorax',

+                         '--product=Fedora',

+                         '--version=Rawhide', '--release=20151203.t.0',

+                         '--source=file://%s/compose/Everything/x86_64/os' % self.topdir,

+                         '--variant=Everything',

+                         '--nomacboot',

+                         '--installpkgs=fedora-productimg-atomic',

+                         '--add-template=/spin-kickstarts/atomic-installer/lorax-configure-repo.tmpl',

+                         '--add-arch-template=/spin-kickstarts/atomic-installer/lorax-embed-repo.tmpl',

+                         '--add-template-var=ostree_osname=fedora-atomic',

+                         '--add-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host',

+                         '--add-arch-template-var=ostree_repo=https://kojipkgs.fedoraproject.org/compose/atomic/Rawhide/',

+                         '--add-arch-template-var=ostree_osname=fedora-atomic',

+                         '--add-arch-template-var=ostree_ref=fedora-atomic/Rawhide/x86_64/docker-host',

+                         self.topdir + '/work/x86_64/Everything/ostree_installer'],

+                        channel=None, mounts=[self.topdir],

+                        packages=['pungi', 'lorax', 'ostree'],

+                        task_id=True, use_shell=True)])

+         self.assertEqual(koji.run_runroot_cmd.call_args_list,

+                          [mock.call(koji.get_runroot_cmd.return_value,

+                                     log_file=self.topdir + '/logs/x86_64/ostree_installer/runroot.log')])

+         self.assertEqual(link.call_args_list,

+                          [mock.call(self.topdir + '/work/x86_64/Everything/ostree_installer/images/boot.iso',

+                                     final_iso_path)])

+         self.assertEqual(get_file_size.call_args_list, [mock.call(final_iso_path)])

+         self.assertEqual(get_mtime.call_args_list, [mock.call(final_iso_path)])

+         self.assertImageAdded(compose, ImageCls, IsoWrapper)

+         self.assertEqual(compose.get_image_name.call_args_list,

+                          [mock.call('x86_64', compose.variants['Everything'], disc_type='dvd')])

+ 

+     @mock.patch('kobo.shortcuts.run')

+     @mock.patch('productmd.images.Image')

+     @mock.patch('pungi.util.get_mtime')

+     @mock.patch('pungi.util.get_file_size')

+     @mock.patch('pungi.wrappers.iso.IsoWrapper')

+     @mock.patch('os.link')

+     @mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')

+     def test_fail_crash(self, KojiWrapper, link, IsoWrapper, get_file_size,

+                         get_mtime, ImageCls, run):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_name': 'Fedora',

+             'release_version': 'Rawhide',

+             'koji_profile': 'koji',

+             'runroot_tag': 'rrt',

+             'failable_deliverables': [

+                 ('^.+$', {'*': ['ostree-installer']})

+             ],

+         })

+         pool = mock.Mock()

+         cfg = {

+             'source_repo_from': 'Everything',

+             'release': None,

+         }

+         koji = KojiWrapper.return_value

+         koji.run_runroot_cmd.side_effect = helpers.boom

+ 

+         t = ostree.OstreeInstallerThread(pool)

+ 

+         t.process((compose, compose.variants['Everything'], 'x86_64', cfg), 1)

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Ostree installer (variant Everything, arch x86_64) failed, but going on anyway.'),

+             mock.call('BOOM')

+         ])

+ 

+     @mock.patch('kobo.shortcuts.run')

+     @mock.patch('productmd.images.Image')

+     @mock.patch('pungi.util.get_mtime')

+     @mock.patch('pungi.util.get_file_size')

+     @mock.patch('pungi.wrappers.iso.IsoWrapper')

+     @mock.patch('os.link')

+     @mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')

+     def test_fail_runroot_fail(self, KojiWrapper, link, IsoWrapper,

+                                get_file_size, get_mtime, ImageCls, run):

+         compose = helpers.DummyCompose(self.topdir, {

+             'release_name': 'Fedora',

+             'release_version': 'Rawhide',

+             'koji_profile': 'koji',

+             'runroot_tag': 'rrt',

+             'failable_deliverables': [

+                 ('^.+$', {'*': ['ostree-installer']})

+             ],

+         })

+         pool = mock.Mock()

+         cfg = {

+             'source_repo_from': 'Everything',

+             'release': None,

+         }

+         koji = KojiWrapper.return_value

+         koji.run_runroot_cmd.return_value = {

+             'output': 'Failed',

+             'task_id': 1234,

+             'retcode': 1,

+         }

+ 

+         t = ostree.OstreeInstallerThread(pool)

+ 

+         t.process((compose, compose.variants['Everything'], 'x86_64', cfg), 1)

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Ostree installer (variant Everything, arch x86_64) failed, but going on anyway.'),

+             mock.call('Runroot task failed: 1234. See %s/logs/x86_64/ostree_installer/runroot.log for more details.'

+                       % self.topdir)

+         ])

+ 

+ 

+ if __name__ == '__main__':

+     unittest.main()

@@ -0,0 +1,216 @@

+ #!/usr/bin/env python

+ # -*- coding: utf-8 -*-

+ 

+ 

+ import unittest

+ import mock

+ 

+ import os

+ import sys

+ 

+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

+ 

+ from tests import helpers

+ from pungi.phases import ostree

+ 

+ 

+ class OSTreePhaseTest(helpers.PungiTestCase):

+ 

+     def test_validate(self):

+         compose = helpers.DummyCompose(self.topdir, {

+             'ostree': [

+                 ("^Atomic$", {

+                     "x86_64": {

+                         "treefile": "fedora-atomic-docker-host.json",

+                         "config_url": "https://git.fedorahosted.org/git/fedora-atomic.git",

+                         "source_repo_from": "Everything",

+                         "ostree_repo": "/mnt/koji/compose/atomic/Rawhide/"

+                     }

+                 })

+             ]

+         })

+ 

+         phase = ostree.OSTreePhase(compose)

+         try:

+             phase.validate()

+         except:

+             self.fail('Correct config must validate')

+ 

+     def test_validate_bad_conf(self):

+         compose = helpers.DummyCompose(self.topdir, {

+             'ostree': 'yes please'

+         })

+ 

+         phase = ostree.OSTreePhase(compose)

+         with self.assertRaises(ValueError):

+             phase.validate()

+ 

+     @mock.patch('pungi.phases.ostree.ThreadPool')

+     def test_run(self, ThreadPool):

+         cfg = mock.Mock()

+         compose = helpers.DummyCompose(self.topdir, {

+             'ostree': [

+                 ('^Everything$', {'x86_64': cfg})

+             ]

+         })

+ 

+         pool = ThreadPool.return_value

+ 

+         phase = ostree.OSTreePhase(compose)

+         phase.run()

+ 

+         self.assertEqual(len(pool.add.call_args_list), 1)

+         self.assertEqual(pool.queue_put.call_args_list,

+                          [mock.call((compose, compose.variants['Everything'], 'x86_64', cfg))])

+ 

+     @mock.patch('pungi.phases.ostree.ThreadPool')

+     def test_skip_without_config(self, ThreadPool):

+         compose = helpers.DummyCompose(self.topdir, {})

+         compose.just_phases = None

+         compose.skip_phases = []

+         phase = ostree.OSTreePhase(compose)

+         self.assertTrue(phase.skip())

+ 

+ 

+ class OSTreeThreadTest(helpers.PungiTestCase):

+ 

+     def _dummy_config_repo(self, scm_dict, target, logger=None):

+         helpers.touch(os.path.join(target, 'fedora-atomic-docker-host.json'))

+         helpers.touch(os.path.join(target, 'fedora-rawhide.repo'),

+                       'mirrorlist=mirror-mirror-on-the-wall')

+         helpers.touch(os.path.join(target, 'fedora-24.repo'),

+                       'metalink=who-is-the-fairest-of-them-all')

+         helpers.touch(os.path.join(target, 'fedora-23.repo'),

+                       'baseurl=why-not-zoidberg?')

+ 

+     @mock.patch('pungi.wrappers.scm.get_dir_from_scm')

+     @mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')

+     def test_run(self, KojiWrapper, get_dir_from_scm):

+         get_dir_from_scm.side_effect = self._dummy_config_repo

+ 

+         compose = helpers.DummyCompose(self.topdir, {

+             'koji_profile': 'koji',

+             'runroot_tag': 'rrt',

+             'translate_paths': [

+                 (self.topdir + '/compose', 'http://example.com')

+             ]

+         })

+         pool = mock.Mock()

+         cfg = {

+             'source_repo_from': 'Everything',

+             'config_url': 'https://git.fedorahosted.org/git/fedora-atomic.git',

+             'config_branch': 'f24',

+             'treefile': 'fedora-atomic-docker-host.json',

+             'ostree_repo': '/other/place/for/atomic'

+         }

+         koji = KojiWrapper.return_value

+         koji.run_runroot_cmd.return_value = {

+             'task_id': 1234,

+             'retcode': 0,

+             'output': 'Foo bar\n',

+         }

+ 

+         t = ostree.OSTreeThread(pool)

+ 

+         t.process((compose, compose.variants['Everything'], 'x86_64', cfg), 1)

+ 

+         self.assertEqual(get_dir_from_scm.call_args_list,

+                          [mock.call({'scm': 'git', 'repo': 'https://git.fedorahosted.org/git/fedora-atomic.git',

+                                      'branch': 'f24', 'dir': '.'},

+                                     self.topdir + '/work/ostree/config_repo', logger=pool._logger)])

+         self.assertEqual(koji.get_runroot_cmd.call_args_list,

+                          [mock.call('rrt', 'x86_64',

+                                     ['pungi-make-ostree',

+                                      '--log-dir=%s/logs/x86_64/ostree' % self.topdir,

+                                      '--treefile=%s/fedora-atomic-docker-host.json' % (

+                                          self.topdir + '/work/ostree/config_repo'),

+                                      '/other/place/for/atomic'],

+                                     channel=None, mounts=[self.topdir, '/other/place/for/atomic'],

+                                     packages=['pungi', 'ostree', 'rpm-ostree'],

+                                     task_id=True, use_shell=True)])

+         self.assertEqual(koji.run_runroot_cmd.call_args_list,

+                          [mock.call(koji.get_runroot_cmd.return_value,

+                                     log_file=self.topdir + '/logs/x86_64/ostree/runroot.log')])

+ 

+         with open(self.topdir + '/work/ostree/config_repo/fedora-rawhide.repo') as f:

+             self.assertIn('baseurl=http://example.com/Everything/x86_64/os'.format(self.topdir),

+                           f.read())

+         with open(self.topdir + '/work/ostree/config_repo/fedora-24.repo') as f:

+             self.assertIn('baseurl=http://example.com/Everything/x86_64/os'.format(self.topdir),

+                           f.read())

+         with open(self.topdir + '/work/ostree/config_repo/fedora-23.repo') as f:

+             self.assertIn('baseurl=http://example.com/Everything/x86_64/os'.format(self.topdir),

+                           f.read())

+ 

+     @mock.patch('pungi.wrappers.scm.get_dir_from_scm')

+     @mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')

+     def test_run_fail(self, KojiWrapper, get_dir_from_scm):

+         get_dir_from_scm.side_effect = self._dummy_config_repo

+ 

+         compose = helpers.DummyCompose(self.topdir, {

+             'koji_profile': 'koji',

+             'runroot_tag': 'rrt',

+             'failable_deliverables': [

+                 ('^.*$', {'*': ['ostree']})

+             ]

+         })

+         pool = mock.Mock()

+         cfg = {

+             'source_repo_from': 'Everything',

+             'config_url': 'https://git.fedorahosted.org/git/fedora-atomic.git',

+             'config_branch': 'f24',

+             'treefile': 'fedora-atomic-docker-host.json',

+             'ostree_repo': '/other/place/for/atomic'

+         }

+         koji = KojiWrapper.return_value

+         koji.run_runroot_cmd.return_value = {

+             'task_id': 1234,

+             'retcode': 1,

+             'output': 'Foo bar\n',

+         }

+ 

+         t = ostree.OSTreeThread(pool)

+ 

+         t.process((compose, compose.variants['Everything'], 'x86_64', cfg), 1)

+ 

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway.'),

+             mock.call('Runroot task failed: 1234. See %s for more details.'

+                       % (self.topdir + '/logs/x86_64/ostree/runroot.log'))

+         ])

+ 

+     @mock.patch('pungi.wrappers.scm.get_dir_from_scm')

+     @mock.patch('pungi.wrappers.kojiwrapper.KojiWrapper')

+     def test_run_handle_exception(self, KojiWrapper, get_dir_from_scm):

+         get_dir_from_scm.side_effect = self._dummy_config_repo

+ 

+         compose = helpers.DummyCompose(self.topdir, {

+             'koji_profile': 'koji',

+             'runroot_tag': 'rrt',

+             'failable_deliverables': [

+                 ('^.*$', {'*': ['ostree']})

+             ]

+         })

+         pool = mock.Mock()

+         cfg = {

+             'source_repo_from': 'Everything',

+             'config_url': 'https://git.fedorahosted.org/git/fedora-atomic.git',

+             'config_branch': 'f24',

+             'treefile': 'fedora-atomic-docker-host.json',

+             'ostree_repo': '/other/place/for/atomic'

+         }

+         koji = KojiWrapper.return_value

+         koji.run_runroot_cmd.side_effect = helpers.boom

+ 

+         t = ostree.OSTreeThread(pool)

+ 

+         t.process((compose, compose.variants['Everything'], 'x86_64', cfg), 1)

+ 

+         compose.log_info.assert_has_calls([

+             mock.call('[FAIL] Ostree (variant Everything, arch x86_64) failed, but going on anyway.'),

+             mock.call('BOOM')

+         ])

+ 

+ 

+ if __name__ == '__main__':

+     unittest.main()

@@ -0,0 +1,41 @@

+ #!/usr/bin/env python

+ # -*- coding: utf-8 -*-

+ 

+ 

+ import unittest

+ import mock

+ 

+ import os

+ import sys

+ 

+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'bin'))

+ 

+ from tests import helpers

+ from pungi import ostree

+ 

+ 

class OstreeScriptTest(helpers.PungiTestCase):
    """Tests for the ostree command-line entry point (pungi.ostree.main)."""

    @mock.patch('kobo.shortcuts.run')
    def test_full_run(self, run):
        """main() must init the ostree repo and then compose the tree,
        each step logged into the given log directory."""
        repo = os.path.join(self.topdir, 'atomic')
        log_dir = os.path.join(self.topdir, 'logs', 'Atomic')

        ostree.main([
            '--log-dir=%s' % log_dir,
            '--treefile=%s/fedora-atomic-docker-host.json' % self.topdir,
            repo,
        ])

        self.maxDiff = None
        init_call = mock.call(
            ['ostree', 'init', '--repo=%s' % repo, '--mode=archive-z2'],
            logfile=self.topdir + '/logs/Atomic/init-ostree-repo.log',
            show_cmd=True, stdout=True)
        compose_call = mock.call(
            ['rpm-ostree', 'compose', 'tree', '--repo=%s' % repo,
             self.topdir + '/fedora-atomic-docker-host.json'],
            logfile=self.topdir + '/logs/Atomic/create-ostree-repo.log',
            show_cmd=True, stdout=True)
        # Order-insensitive comparison (Python 2 unittest API).
        self.assertItemsEqual(run.call_args_list, [init_call, compose_call])

+ 

+ 

# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()

file added
+36
@@ -0,0 +1,36 @@

+ #!/usr/bin/env python2

+ # -*- coding: utf-8 -*-

+ 

+ import mock

+ import unittest

+ import os

+ import sys

+ 

+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

+ 

+ from pungi import paths

+ 

+ 

class TranslatePathTestCase(unittest.TestCase):
    """Tests for paths.translate_path prefix substitution."""

    def test_does_nothing_without_config(self):
        # No translate_paths configured: the path comes back untouched.
        compose = mock.Mock(conf={})
        self.assertEqual(
            paths.translate_path(compose, '/mnt/koji/compose/rawhide/XYZ'),
            '/mnt/koji/compose/rawhide/XYZ')

    def test_translates_prefix(self):
        # A matching prefix is swapped for the configured replacement.
        conf = {'translate_paths': [('/mnt/koji', 'http://example.com')]}
        compose = mock.Mock(conf=conf)
        self.assertEqual(
            paths.translate_path(compose, '/mnt/koji/compose/rawhide/XYZ'),
            'http://example.com/compose/rawhide/XYZ')

    def test_does_not_translate_not_matching(self):
        # A path outside every configured prefix is left alone.
        conf = {'translate_paths': [('/mnt/koji', 'http://example.com')]}
        compose = mock.Mock(conf=conf)
        self.assertEqual(
            paths.translate_path(compose, '/mnt/fedora_koji/compose/rawhide/XYZ'),
            '/mnt/fedora_koji/compose/rawhide/XYZ')

+ 

+ 

# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()

file modified
+93 -1
@@ -4,7 +4,10 @@

  import mock

  import os

  import sys

- import unittest

+ try:

+     import unittest2 as unittest

+ except ImportError:

+     import unittest

  import tempfile

  import shutil

  
@@ -13,6 +16,8 @@

  from pungi import compose

  from pungi import util

  

+ from tests.helpers import touch, PungiTestCase

+ 

  

  class TestGitRefResolver(unittest.TestCase):

  
@@ -26,6 +31,15 @@

          run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'HEAD'])

  

      @mock.patch('pungi.util.run')

+     def test_successful_resolve_branch(self, run):

+         run.return_value = (0, 'CAFEBABE\trefs/heads/f24\n')

+ 

+         url = util.resolve_git_url('https://git.example.com/repo.git?somedir#origin/f24')

+ 

+         self.assertEqual(url, 'https://git.example.com/repo.git?somedir#CAFEBABE')

+         run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'refs/heads/f24'])

+ 

+     @mock.patch('pungi.util.run')

      def test_resolve_missing_spec(self, run):

          url = util.resolve_git_url('https://git.example.com/repo.git')

  
@@ -57,6 +71,15 @@

          run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'HEAD'])

          self.assertEqual(url, 'https://git.example.com/repo.git?#CAFEBABE')

  

+     @mock.patch('pungi.util.run')

+     def test_resolve_strip_git_plus_prefix(self, run):

+         run.return_value = (0, 'CAFEBABE\tHEAD\n')

+ 

+         url = util.resolve_git_url('git+https://git.example.com/repo.git#HEAD')

+ 

+         run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'HEAD'])

+         self.assertEqual(url, 'git+https://git.example.com/repo.git#CAFEBABE')

+ 

  

  class TestGetVariantData(unittest.TestCase):

      def test_get_simple(self):
@@ -133,5 +156,74 @@

              self.assertEqual(volid, expected)

  

  

class TestFindOldCompose(unittest.TestCase):
    """Tests for util.find_old_compose compose-directory scanning."""

    def setUp(self):
        self.tmp_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tmp_dir)

    def _status(self, compose_dir, status):
        # Create <tmp>/<compose_dir>/STATUS with the given status text.
        touch(os.path.join(self.tmp_dir, compose_dir, 'STATUS'), status)

    def test_finds_single(self):
        self._status('Fedora-Rawhide-20160229.0', 'FINISHED')
        old = util.find_old_compose(self.tmp_dir, 'Fedora', 'Rawhide')
        self.assertEqual(old, self.tmp_dir + '/Fedora-Rawhide-20160229.0')

    def test_ignores_in_progress(self):
        self._status('Fedora-Rawhide-20160229.0', 'STARTED')
        self.assertIsNone(util.find_old_compose(self.tmp_dir, 'Fedora', 'Rawhide'))

    def test_finds_latest(self):
        # Out of several usable composes the newest one wins.
        self._status('Fedora-Rawhide-20160228.0', 'DOOMED')
        self._status('Fedora-Rawhide-20160229.0', 'FINISHED')
        self._status('Fedora-Rawhide-20160229.1', 'FINISHED_INCOMPLETE')
        old = util.find_old_compose(self.tmp_dir, 'Fedora', 'Rawhide')
        self.assertEqual(old, self.tmp_dir + '/Fedora-Rawhide-20160229.1')

    def test_finds_ignores_other_files(self):
        # Plain files, nested STATUS paths and other products must not match.
        touch(self.tmp_dir + '/Fedora-Rawhide-20160229.0', 'not a compose')
        touch(self.tmp_dir + '/Fedora-Rawhide-20160228.0/STATUS/file', 'also not a compose')
        self._status('Fedora-24-20160229.0', 'FINISHED')
        self._status('Another-Rawhide-20160229.0', 'FINISHED')
        self.assertIsNone(util.find_old_compose(self.tmp_dir, 'Fedora', 'Rawhide'))

    def test_search_in_file(self):
        # Passing a regular file instead of a directory must not crash.
        touch(self.tmp_dir + '/file')
        self.assertIsNone(
            util.find_old_compose(self.tmp_dir + '/file', 'Fedora', 'Rawhide'))

    def test_skips_symlink(self):
        # Symlinked entries are not considered valid old composes.
        os.symlink(self.tmp_dir, self.tmp_dir + '/Fedora-Rawhide-20160229.0')
        self.assertIsNone(util.find_old_compose(self.tmp_dir, 'Fedora', 'Rawhide'))

    def test_finds_layered_product(self):
        self._status('Fedora-Rawhide-Base-1-20160229.0', 'FINISHED')
        old = util.find_old_compose(self.tmp_dir, 'Fedora', 'Rawhide',
                                    base_product_short='Base',
                                    base_product_version='1')
        self.assertEqual(old, self.tmp_dir + '/Fedora-Rawhide-Base-1-20160229.0')

+ 

+ 

class TestHelpers(PungiTestCase):
    """Tests for small utility helpers in pungi.util."""

    def test_process_args(self):
        # None or an empty list yield no arguments at all.
        self.assertEqual(util.process_args('--opt=%s', None), [])
        self.assertEqual(util.process_args('--opt=%s', []), [])
        # A list formats every element separately.
        self.assertEqual(util.process_args('--opt=%s', ['foo', 'bar']),
                         ['--opt=foo', '--opt=bar'])
        # A single string produces a one-element result.
        self.assertEqual(util.process_args('--opt=%s', 'foo'), ['--opt=foo'])

    def test_makedirs(self):
        target = self.topdir + '/foo/bar/baz'
        util.makedirs(target)
        self.assertTrue(os.path.isdir(target))

    def test_makedirs_on_existing(self):
        # Unlike os.makedirs, util.makedirs must tolerate existing dirs.
        target = self.topdir + '/foo/bar/baz'
        os.makedirs(target)
        try:
            util.makedirs(target)
        except OSError:
            self.fail('makedirs raised exception on existing directory')

+ 

+ 

  if __name__ == "__main__":

      unittest.main()