#739 Port to Python 3
Merged 6 years ago by lsedlar. Opened 6 years ago by lsedlar.
lsedlar/pungi py3  into  master

file modified
+3 -4
@@ -11,8 +11,7 @@ 

  import datetime

  import getpass

  import socket

- import time

- import pipes

+ from six.moves import shlex_quote

  import json

  

  here = sys.path[0]
@@ -267,7 +266,7 @@ 

      compose.log_info("Pungi version: %s" % get_full_version())

      compose.log_info("User name: %s" % getpass.getuser())

      compose.log_info("Working directory: %s" % os.getcwd())

-     compose.log_info("Command line: %s" % " ".join([pipes.quote(arg) for arg in sys.argv]))

+     compose.log_info("Command line: %s" % " ".join([shlex_quote(arg) for arg in sys.argv]))

      compose.log_info("Compose top directory: %s" % compose.topdir)

      compose.log_info("Current timezone offset: %s" % pungi.util.get_tz_offset())

      compose.read_variants()
@@ -475,7 +474,7 @@ 

              COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)

              COMPOSE.write_status("DOOMED")

              import kobo.tback

-             with open(tb_path, "w") as f:

+             with open(tb_path, "wb") as f:

                  f.write(kobo.tback.Traceback().get_traceback())

          else:

              print("Exception: %s" % ex)

file modified
+6 -4
@@ -575,10 +575,12 @@ 

          * With ``greedy_method = "build"`` both ``pkg-b-provider-1`` and

            ``pkg-b-provider-2`` will be pulled in.

  

- **gather_backend** = ``yum``

-     (*str*) -- Either ``yum`` or ``dnf``. This changes the entire codebase

-     doing dependency solving, so it can change the result in unpredictable

-     ways.

+ **gather_backend**

+     (*str*) -- This changes the entire codebase doing dependency solving, so it

+     can change the result in unpredictable ways.

+ 

+     On Python 2, the choice is between ``yum`` and ``dnf`` and defaults to

+     ``yum``. On Python 3 ``dnf`` is the only option and default.

  

      Particularly the multilib work is performed differently by using

      ``python-multilib`` library. Please refer to ``multilib`` option to see the

file modified
+1 -1
@@ -13,7 +13,7 @@ 

      if os.path.isdir(os.path.join(location, '.git')):

          import subprocess

          proc = subprocess.Popen(['git', '--git-dir=%s/.git' % location, 'describe', '--tags'],

-                                 stdout=subprocess.PIPE)

+                                 stdout=subprocess.PIPE, universal_newlines=True)

          output, _ = proc.communicate()

          return re.sub(r'-1.fc\d\d?', '', output.strip().replace('pungi-', ''))

      else:

file modified
+16 -7
@@ -1,6 +1,5 @@ 

  # -*- coding: utf-8 -*-

  

- 

  # This program is free software; you can redistribute it and/or modify

  # it under the terms of the GNU General Public License as published by

  # the Free Software Foundation; version 2 of the License.
@@ -152,7 +151,7 @@ 

      def has_default(x):

          return schema['properties'].get(x, {}).get('default') == conf[x]

  

-     for name, opt in valid_options.iteritems():

+     for name, opt in valid_options.items():

          value = conf.get(name)

  

          errors.extend(_check_dep(name, value, opt.get('conflicts', []),
@@ -178,7 +177,7 @@ 

      DefaultValidator = _extend_with_default_and_alias(jsonschema.Draft4Validator)

      validator = DefaultValidator(schema,

                                   {'array': (tuple, list),

-                                   'regex': (str, unicode)})

+                                   'regex': six.string_types})

      errors = []

      warnings = []

      for error in validator.iter_errors(config):
@@ -239,7 +238,7 @@ 

          Hook the instance and yield errors and warnings.

          """

          errors = []

-         for property, subschema in properties.iteritems():

+         for property, subschema in properties.items():

              # update instance for alias option

              # If alias option for the property is present and property is not specified,

              # update the property in instance with value from alias option.
@@ -285,7 +284,7 @@ 

          Assign default values to options that have them defined and are not

          specified.

          """

-         for property, subschema in properties.iteritems():

+         for property, subschema in properties.items():

              if "default" in subschema and property not in instance:

                  instance.setdefault(property, subschema["default"])

  
@@ -582,8 +581,8 @@ 

              "gather_source_mapping": {"type": "string"},

              "gather_backend": {

                  "type": "string",

-                 "enum": ["yum", "dnf"],

-                 "default": "yum",

+                 "enum": _get_gather_backends(),

+                 "default": _get_default_gather_backend(),

              },

              "gather_profiler": {

                  "type": "boolean",
@@ -1204,3 +1203,13 @@ 

          ],

      },

  }

+ 

+ 

+ def _get_gather_backends():

+     if six.PY2:

+         return ['yum', 'dnf']

+     return ['dnf']

+ 

+ 

+ def _get_default_gather_backend():

+     return 'yum' if six.PY2 else 'dnf'

file modified
+2 -2
@@ -236,7 +236,7 @@ 

  

      def get_variants(self, types=None, arch=None):

          result = []

-         for i in self.variants.itervalues():

+         for i in self.variants.values():

              if (not types or i.type in types) and (not arch or arch in i.arches):

                  result.append(i)

              result.extend(i.get_variants(types=types, arch=arch))
@@ -257,7 +257,7 @@ 

          return self._status_file

  

      def _log_failed_deliverables(self):

-         for kind, data in self.failed_deliverables.iteritems():

+         for kind, data in self.failed_deliverables.items():

              for variant, arch, subvariant in data:

                  self.log_info('Failed %s on variant <%s>, arch <%s>, subvariant <%s>.'

                                % (kind, variant, arch, subvariant))

file modified
+3 -3
@@ -3,9 +3,9 @@ 

  from __future__ import print_function

  

  import os

- import pipes

  import six

  from collections import namedtuple

+ from six.moves import shlex_quote

  

  from .wrappers import iso

  from .wrappers.jigdo import JigdoWrapper
@@ -22,8 +22,8 @@ 

      expanded.

      """

      if str.startswith('$TEMPLATE'):

-         return '$TEMPLATE%s' % pipes.quote(str.replace('$TEMPLATE', '', 1))

-     return pipes.quote(str)

+         return '$TEMPLATE%s' % shlex_quote(str.replace('$TEMPLATE', '', 1))

+     return shlex_quote(str)

  

  

  def emit(f, cmd):

file modified
+4 -4
@@ -242,12 +242,12 @@ 

  

          # return package with shortest name, alphabetically ordered

          result = list(result)

-         result.sort(lambda x, y: cmp(x.name, y.name))

-         result.sort(lambda x, y: cmp(len(x.name), len(y.name)))

+         result.sort(key=lambda x: x.name)

+         result.sort(key=lambda x: len(x.name))

  

          # best arch

          arches = self.dnf.arch_wrapper.all_arches

-         result.sort(lambda x, y: cmp(arches.index(x.arch), arches.index(y.arch)))

+         result.sort(key=lambda x: arches.index(x.arch))

          match = result[0]

  

          if self.opts.greedy_method == "build" and req:
@@ -405,7 +405,7 @@ 

                  for po in pkgs:

                      packages_by_name.setdefault(po.name, []).append(po)

  

-                 for name, packages in packages_by_name.iteritems():

+                 for name, packages in packages_by_name.items():

                      pkgs = self._get_best_package(packages)

                      if pkgs:

                          added.update(pkgs)

file modified
+1 -1
@@ -176,7 +176,7 @@ 

      for variant in ci_copy.variants.variants.values():

          for field in variant.paths._fields:

              field_paths = getattr(variant.paths, field)

-             for arch, dirpath in field_paths.iteritems():

+             for arch, dirpath in field_paths.items():

                  dirpath = os.path.join(compose.paths.compose.topdir(), dirpath)

                  if not (os.path.isdir(dirpath) and os.listdir(dirpath)):

                      field_paths[arch] = None

file modified
+1 -1
@@ -53,7 +53,7 @@ 

              return False

          if pkg.name in self.whitelist:

              return 'whitelist'

-         for method, cls in self.methods.iteritems():

+         for method, cls in self.methods.items():

              if cls.select(pkg):

                  return method

          return False

file modified
+6 -6
@@ -17,13 +17,13 @@ 

  import errno

  import os

  import time

- import pipes

  import shutil

  import re

  

  from kobo.threads import ThreadPool, WorkerThread

  from kobo.shortcuts import run

  from productmd.images import Image

+ from six.moves import shlex_quote

  

  from pungi.arch import get_valid_arches

  from pungi.util import get_volid, get_arch_variant_data
@@ -100,8 +100,8 @@ 

                                          add_arch_template_var=add_arch_template_var,

                                          noupgrade=noupgrade,

                                          log_dir=log_dir)

-         return 'rm -rf %s && %s' % (pipes.quote(output_dir),

-                                     ' '.join([pipes.quote(x) for x in lorax_cmd]))

+         return 'rm -rf %s && %s' % (shlex_quote(output_dir),

+                                     ' '.join([shlex_quote(x) for x in lorax_cmd]))

  

      def run(self):

          lorax = LoraxWrapper()
@@ -276,7 +276,7 @@ 

  

      # copy src to temp

      # TODO: place temp on the same device as buildinstall dir so we can hardlink

-     cmd = "cp -av --remove-destination %s/* %s/" % (pipes.quote(src), pipes.quote(tmp_dir))

+     cmd = "cp -av --remove-destination %s/* %s/" % (shlex_quote(src), shlex_quote(tmp_dir))

      run(cmd)

  

      found_configs = tweak_configs(tmp_dir, volid, kickstart_file)
@@ -300,10 +300,10 @@ 

                      run(cmd)

  

      # HACK: make buildinstall files world readable

-     run("chmod -R a+rX %s" % pipes.quote(tmp_dir))

+     run("chmod -R a+rX %s" % shlex_quote(tmp_dir))

  

      # copy temp to dst

-     cmd = "cp -av --remove-destination %s/* %s/" % (pipes.quote(tmp_dir), pipes.quote(dst))

+     cmd = "cp -av --remove-destination %s/* %s/" % (shlex_quote(tmp_dir), shlex_quote(dst))

      run(cmd)

  

      shutil.rmtree(tmp_dir)

file modified
+2 -2
@@ -16,7 +16,6 @@ 

  

  import os

  import time

- import pipes

  import random

  import shutil

  
@@ -24,6 +23,7 @@ 

  from productmd.images import Image

  from kobo.threads import ThreadPool, WorkerThread

  from kobo.shortcuts import run, relative_path

+ from six.moves import shlex_quote

  

  from pungi.wrappers import iso

  from pungi.wrappers.createrepo import CreaterepoWrapper
@@ -415,7 +415,7 @@ 

  

          if file_list_content:

              # write modified repodata only if there are packages available

-             run("cp -a %s/repodata %s/" % (pipes.quote(tree_dir), pipes.quote(iso_dir)))

+             run("cp -a %s/repodata %s/" % (shlex_quote(tree_dir), shlex_quote(iso_dir)))

              with open(file_list, "w") as f:

                  f.write("\n".join(file_list_content))

              cmd = repo.get_createrepo_cmd(tree_dir, update=True, database=True, skip_stat=True, pkglist=file_list, outputdir=iso_dir, workers=3, checksum=createrepo_checksum)

file modified
+4 -4
@@ -130,11 +130,11 @@ 

      manifest = productmd.rpms.Rpms()

      manifest.load(manifest_file)

  

-     for rpms_arch, data in manifest.rpms.get(variant.uid, {}).iteritems():

+     for rpms_arch, data in manifest.rpms.get(variant.uid, {}).items():

          if arch is not None and arch != rpms_arch:

              continue

-         for srpm_data in data.itervalues():

-             for rpm_nevra, rpm_data in srpm_data.iteritems():

+         for srpm_data in data.values():

+             for rpm_nevra, rpm_data in srpm_data.items():

                  if types[pkg_type][0] != rpm_data['category']:

                      continue

                  path = os.path.join(compose.topdir, "compose", rpm_data["path"])
@@ -185,7 +185,7 @@ 

      if arch in variant.arch_mmds:

          import yaml

          modules = []

-         for mmd in variant.arch_mmds[arch].itervalues():

+         for mmd in variant.arch_mmds[arch].values():

              # Create copy of architecture specific mmd to filter out packages

              # which are not part of this particular repo.

              repo_mmd = copy.deepcopy(mmd)

@@ -31,7 +31,7 @@ 

  

  def get_gather_source(name):

      import pungi.phases.gather.sources

-     from source import GatherSourceContainer

+     from .source import GatherSourceContainer

      GatherSourceContainer.register_module(pungi.phases.gather.sources)

      container = GatherSourceContainer()

      return container["GatherSource%s" % name]
@@ -39,7 +39,7 @@ 

  

  def get_gather_method(name):

      import pungi.phases.gather.methods

-     from method import GatherMethodContainer

+     from .method import GatherMethodContainer

      GatherMethodContainer.register_module(pungi.phases.gather.methods)

      container = GatherMethodContainer()

      return container["GatherMethod%s" % name]
@@ -100,7 +100,7 @@ 

      result = _mk_pkg_map(iterable_class=set)

      if variant.parent is None:

          return result

-     for pkg_type, pkgs in result_dict.get(arch, {}).get(variant.parent.uid, {}).iteritems():

+     for pkg_type, pkgs in result_dict.get(arch, {}).get(variant.parent.uid, {}).items():

          for pkg in pkgs:

              nvra = parse_nvra(pkg["path"])

              result[pkg_type].add((nvra["name"], nvra["arch"]))
@@ -142,7 +142,7 @@ 

      msg = "Writing package list (arch: %s, variant: %s)" % (arch, variant)

      compose.log_info("[BEGIN] %s" % msg)

  

-     for pkg_type, pkgs in pkg_map.iteritems():

+     for pkg_type, pkgs in pkg_map.items():

          file_name = compose.paths.work.package_list(arch=arch, variant=variant, pkg_type=pkg_type)

          with open(file_name, "w") as pkg_list:

              for pkg in pkgs:
@@ -188,7 +188,7 @@ 

      addon_pkgs = _mk_pkg_map(iterable_class=set)

      move_to_parent_pkgs = _mk_pkg_map()

      removed_pkgs = _mk_pkg_map()

-     for pkg_type, pkgs in pkg_map.iteritems():

+     for pkg_type, pkgs in pkg_map.items():

  

          new_pkgs = []

          for pkg in pkgs:
@@ -262,13 +262,13 @@ 

                  compose, arch, variant, pkg_map, parent_pkgs, remove_pkgs=remove_pkgs)

  

              # update all_addon_pkgs

-             for pkg_type, pkgs in included_packages.iteritems():

+             for pkg_type, pkgs in included_packages.items():

                  all_included_packages.setdefault(pkg_type, set()).update(pkgs)

  

              if move_to_parent:

                  # move packages to parent

                  parent_pkg_map = result[arch][variant.parent.uid]

-                 for pkg_type, pkgs in move_to_parent_pkgs.iteritems():

+                 for pkg_type, pkgs in move_to_parent_pkgs.items():

                      for pkg in pkgs:

                          compose.log_debug("Moving package to parent (arch: %s, variant: %s, pkg_type: %s): %s"

                                            % (arch, variant.uid, pkg_type, os.path.basename(pkg["path"])))
@@ -355,7 +355,7 @@ 

      variants = [variant.uid] if variant else prepopulate_data.keys()

  

      for var in variants:

-         for build, packages in prepopulate_data.get(var, {}).get(arch, {}).iteritems():

+         for build, packages in prepopulate_data.get(var, {}).get(arch, {}).items():

              for i in packages:

                  pkg_name, pkg_arch = split_name_arch(i)

                  if pkg_arch not in get_compatible_arches(arch, multilib=True):

@@ -48,12 +48,12 @@ 

          if variant is None:

              # get all packages for all variants

              for variant_uid in mapping:

-                 for pkg_name, pkg_arches in mapping[variant_uid][arch].iteritems():

+                 for pkg_name, pkg_arches in mapping[variant_uid][arch].items():

                      for pkg_arch in pkg_arches:

                          packages.add((pkg_name, pkg_arch))

          else:

              # get packages for a particular variant

-             for pkg_name, pkg_arches in mapping[variant.uid][arch].iteritems():

+             for pkg_name, pkg_arches in mapping[variant.uid][arch].items():

                  for pkg_arch in pkg_arches:

                      packages.add((pkg_name, pkg_arch))

          return packages, set()

file modified
+1 -1
@@ -194,7 +194,7 @@ 

  

          paths = koji_wrapper.get_image_paths(output["task_id"])

  

-         for arch, paths in paths.iteritems():

+         for arch, paths in paths.items():

              for path in paths:

                  # format is list of tuples [('qcow2', '.qcow2'), ('raw-xz', 'raw.xz'),]

                  for format, suffix in cmd['format']:

@@ -89,7 +89,7 @@ 

              # digest from first run..

              cache[full_path] = shortcuts.compute_file_checksums(full_path, checksum_types)

          digests = cache[full_path]

-         for checksum, digest in digests.iteritems():

+         for checksum, digest in digests.items():

              # Update metadata with the checksum

              image.add_checksum(None, checksum, digest)

              # If not turned of, create the file-specific checksum file
@@ -112,7 +112,7 @@ 

  def make_checksums(topdir, im, checksum_types, one_file, base_checksum_name_gen):

      results = defaultdict(set)

      cache = {}

-     for (variant, arch, path), images in get_images(topdir, im).iteritems():

+     for (variant, arch, path), images in get_images(topdir, im).items():

          _compute_checksums(results, cache, variant, arch, path, images,

                             checksum_types, base_checksum_name_gen, one_file)

  

file modified
+2 -2
@@ -17,12 +17,12 @@ 

  import os

  import sys

  import time

- import pipes

  import shutil

  

  from kobo.threads import ThreadPool, WorkerThread

  from kobo.shortcuts import run, save_to_file, force_list

  from productmd.images import Image

+ from six.moves import shlex_quote

  

  from pungi.wrappers.kojiwrapper import KojiWrapper

  from pungi.wrappers import iso
@@ -254,7 +254,7 @@ 

          :param iso_path: (str) absolute path to the ISO

          """

          dir, filename = os.path.split(iso_path)

-         run("cd %s && %s" % (pipes.quote(dir), iso.get_manifest_cmd(filename)))

+         run("cd %s && %s" % (shlex_quote(dir), iso.get_manifest_cmd(filename)))

  

      def _sign_image(self, koji_wrapper, compose, cmd, koji_task_id):

          signing_key_id = compose.conf.get("signing_key_id")

@@ -134,7 +134,7 @@ 

  

          paths = koji_wrapper.get_image_paths(output['task_id'])

  

-         for arch, paths in paths.iteritems():

+         for arch, paths in paths.items():

              for path in paths:

                  if path.endswith('.iso'):

                      image_infos.append({'path': path, 'arch': arch})

@@ -4,7 +4,7 @@ 

  from kobo.threads import ThreadPool, WorkerThread

  import shutil

  from productmd import images

- import pipes

+ from six.moves import shlex_quote

  from kobo import shortcuts

  

  from .base import ConfigGuardedPhase, PhaseLoggerMixin
@@ -99,7 +99,7 @@ 

          boot_iso = os.path.join(output_dir, 'images', 'boot.iso')

  

          shortcuts.run('cp -av %s/* %s/' %

-                       (pipes.quote(output_dir), pipes.quote(os_path)))

+                       (shlex_quote(output_dir), shlex_quote(os_path)))

          try:

              os.link(boot_iso, iso_path)

          except OSError:
@@ -164,8 +164,8 @@ 

              is_final=compose.supported,

              log_dir=self.logdir,

          )

-         cmd = 'rm -rf %s && %s' % (pipes.quote(output_dir),

-                                    ' '.join([pipes.quote(x) for x in lorax_cmd]))

+         cmd = 'rm -rf %s && %s' % (shlex_quote(output_dir),

+                                    ' '.join([shlex_quote(x) for x in lorax_cmd]))

  

          runroot_channel = compose.conf.get("runroot_channel")

          runroot_tag = compose.conf["runroot_tag"]

@@ -23,8 +23,8 @@ 

  

      def run(self):

          pkgset_source = "PkgsetSource%s" % self.compose.conf["pkgset_source"]

-         from source import PkgsetSourceContainer

-         import sources

+         from .source import PkgsetSourceContainer

+         from . import sources

          PkgsetSourceContainer.register_module(sources)

          container = PkgsetSourceContainer()

          SourceClass = container[pkgset_source]

@@ -15,7 +15,7 @@ 

  

  

  import os

- import cPickle as pickle

+ from six.moves import cPickle as pickle

  import json

  import re

  from kobo.shortcuts import force_list

@@ -15,7 +15,7 @@ 

  

  

  import os

- import cPickle as pickle

+ from six.moves import cPickle as pickle

  

  from kobo.shortcuts import run

  

file modified
+17 -17
@@ -39,7 +39,7 @@ 

  import os

  import fnmatch

  import shutil

- import pipes

+ from six.moves import shlex_quote

  

  from kobo.shortcuts import run

  
@@ -134,24 +134,24 @@ 

      mount_tmp = compose.mkdtemp(prefix="product_img_mount_")

      cmds = [

          # allocate image

-         "dd if=/dev/zero of=%s bs=1k count=5760" % pipes.quote(image),

+         "dd if=/dev/zero of=%s bs=1k count=5760" % shlex_quote(image),

          # create file system

-         "mke2fs -F %s" % pipes.quote(image),

+         "mke2fs -F %s" % shlex_quote(image),

          # use guestmount to mount the image, which doesn't require root privileges

          # LIBGUESTFS_BACKEND=direct: running qemu directly without libvirt

-         "LIBGUESTFS_BACKEND=direct guestmount -a %s -m /dev/sda %s" % (pipes.quote(image), pipes.quote(mount_tmp)),

-         "mkdir -p %s/run/install/product" % pipes.quote(mount_tmp),

-         "cp -rp %s/* %s/run/install/product/" % (pipes.quote(product_tmp), pipes.quote(mount_tmp)),

-         "mkdir -p %s/run/install/product/pyanaconda" % pipes.quote(mount_tmp),

+         "LIBGUESTFS_BACKEND=direct guestmount -a %s -m /dev/sda %s" % (shlex_quote(image), shlex_quote(mount_tmp)),

+         "mkdir -p %s/run/install/product" % shlex_quote(mount_tmp),

+         "cp -rp %s/* %s/run/install/product/" % (shlex_quote(product_tmp), shlex_quote(mount_tmp)),

+         "mkdir -p %s/run/install/product/pyanaconda" % shlex_quote(mount_tmp),

          # compat symlink: installclasses -> run/install/product/installclasses

-         "ln -s run/install/product/installclasses %s" % pipes.quote(mount_tmp),

+         "ln -s run/install/product/installclasses %s" % shlex_quote(mount_tmp),

          # compat symlink: locale -> run/install/product/locale

-         "ln -s run/install/product/locale %s" % pipes.quote(mount_tmp),

+         "ln -s run/install/product/locale %s" % shlex_quote(mount_tmp),

          # compat symlink: run/install/product/pyanaconda/installclasses -> ../installclasses

-         "ln -s ../installclasses %s/run/install/product/pyanaconda/installclasses" % pipes.quote(mount_tmp),

-         "fusermount -u %s" % pipes.quote(mount_tmp),

+         "ln -s ../installclasses %s/run/install/product/pyanaconda/installclasses" % shlex_quote(mount_tmp),

+         "fusermount -u %s" % shlex_quote(mount_tmp),

          # tweak last mount path written in the image

-         "tune2fs -M /run/install/product %s" % pipes.quote(image),

+         "tune2fs -M /run/install/product %s" % shlex_quote(image),

      ]

      run(" && ".join(cmds))

      shutil.rmtree(mount_tmp)
@@ -188,7 +188,7 @@ 

      tmp_dir = compose.mkdtemp(prefix="boot_iso_")

      mount_dir = compose.mkdtemp(prefix="boot_iso_mount_")

  

-     cmd = "mount -o loop %s %s" % (pipes.quote(buildinstall_boot_iso), pipes.quote(mount_dir))

+     cmd = "mount -o loop %s %s" % (shlex_quote(buildinstall_boot_iso), shlex_quote(mount_dir))

      run(cmd, logfile=log_file, show_cmd=True)

  

      images_dir = os.path.join(tmp_dir, "images")
@@ -219,19 +219,19 @@ 

      mkisofs_cmd = iso.get_mkisofs_cmd(boot_iso, None, volid=volume_id, exclude=["./lost+found"], graft_points=graft_points_path, **mkisofs_kwargs)

      run(mkisofs_cmd, logfile=log_file, show_cmd=True)

  

-     cmd = "umount %s" % pipes.quote(mount_dir)

+     cmd = "umount %s" % shlex_quote(mount_dir)

      run(cmd, logfile=log_file, show_cmd=True)

  

      if arch == "x86_64":

-         isohybrid_cmd = "isohybrid --uefi %s" % pipes.quote(boot_iso)

+         isohybrid_cmd = "isohybrid --uefi %s" % shlex_quote(boot_iso)

          run(isohybrid_cmd, logfile=log_file, show_cmd=True)

      elif arch == "i386":

-         isohybrid_cmd = "isohybrid %s" % pipes.quote(boot_iso)

+         isohybrid_cmd = "isohybrid %s" % shlex_quote(boot_iso)

          run(isohybrid_cmd, logfile=log_file, show_cmd=True)

  

      # implant MD5SUM to iso

      isomd5sum_cmd = iso.get_implantisomd5_cmd(boot_iso, compose.supported)

-     isomd5sum_cmd = " ".join([pipes.quote(i) for i in isomd5sum_cmd])

+     isomd5sum_cmd = " ".join([shlex_quote(i) for i in isomd5sum_cmd])

      run(isomd5sum_cmd, logfile=log_file, show_cmd=True)

  

      if boot_files:

file modified
+5 -5
@@ -109,7 +109,7 @@ 

      can_fail = getattr(image, 'can_fail', False)

      with failable(compose, can_fail, variant, arch, deliverable,

                    subvariant=image.subvariant):

-         with open(path) as f:

+         with open(path, 'rb') as f:

              iso = is_iso(f)

              if image.format == 'iso' and not iso:

                  raise RuntimeError('%s does not look like an ISO file' % path)
@@ -132,16 +132,16 @@ 

  

  

  def is_iso(f):

-     return _check_magic(f, 0x8001, 'CD001')

+     return _check_magic(f, 0x8001, b'CD001')

  

  

  def has_mbr(f):

-     return _check_magic(f, 0x1fe, '\x55\xAA')

+     return _check_magic(f, 0x1fe, b'\x55\xAA')

  

  

  def has_gpt(f):

-     return _check_magic(f, 0x200, 'EFI PART')

+     return _check_magic(f, 0x200, b'EFI PART')

  

  

  def has_eltorito(f):

-     return _check_magic(f, 0x8801, 'CD001\1EL TORITO SPECIFICATION')

+     return _check_magic(f, 0x8801, b'CD001\1EL TORITO SPECIFICATION')

file modified
+19 -18
@@ -22,14 +22,13 @@ 

  import sys

  import hashlib

  import errno

- import pipes

  import re

- import urlparse

  import contextlib

  import traceback

  import tempfile

  import time

  import functools

+ from six.moves import urllib, range, shlex_quote

  

  from kobo.shortcuts import run, force_list

  from productmd.common import get_major_version
@@ -166,7 +165,7 @@ 

      """Explode a rpm package into target_dir."""

      pkg_path = os.path.abspath(pkg_path)

      makedirs(target_dir)

-     run("rpm2cpio %s | cpio -iuvmd && chmod -R a+rX ." % pipes.quote(pkg_path), workdir=target_dir)

+     run("rpm2cpio %s | cpio -iuvmd && chmod -R a+rX ." % shlex_quote(pkg_path), workdir=target_dir)

  

  

  def pkg_is_rpm(pkg_obj):
@@ -251,7 +250,7 @@ 

      Raises RuntimeError if there was an error. Most likely cause is failure to

      run git command.

      """

-     r = urlparse.urlsplit(url)

+     r = urllib.parse.urlsplit(url)

      ref = _get_git_ref(r.fragment)

      if not ref:

          return url
@@ -260,7 +259,7 @@ 

      # the final result must use original scheme.

      scheme = r.scheme.replace('git+', '')

  

-     baseurl = urlparse.urlunsplit((scheme, r.netloc, r.path, '', ''))

+     baseurl = urllib.parse.urlunsplit((scheme, r.netloc, r.path, '', ''))

      _, output = git_ls_remote(baseurl, ref)

  

      lines = [line for line in output.split('\n') if line]
@@ -274,9 +273,9 @@ 

          raise RuntimeError('Failed to resolve %s', url)

  

      fragment = lines[0].split()[0]

-     result = urlparse.urlunsplit((r.scheme, r.netloc, r.path, r.query, fragment))

+     result = urllib.parse.urlunsplit((r.scheme, r.netloc, r.path, r.query, fragment))

      if '?#' in url:

-         # The urlparse library drops empty query string. This hack puts it back in.

+         # The urllib library drops empty query string. This hack puts it back in.

          result = result.replace('#', '?#')

      return result

  
@@ -309,7 +308,7 @@ 

      :rtype: a list of values

      """

      result = []

-     for conf_variant, conf_data in conf.get(var_name, {}).iteritems():

+     for conf_variant, conf_data in conf.get(var_name, {}).items():

          if not re.match(conf_variant, variant.uid):

              continue

          if keys is not None:
@@ -322,7 +321,7 @@ 

  

  

  def _apply_substitutions(compose, volid):

-     for k, v in compose.conf['volume_id_substitutions'].iteritems():

+     for k, v in compose.conf['volume_id_substitutions'].items():

          volid = volid.replace(k, v)

      return volid

  
@@ -565,16 +564,16 @@ 

  

  def levenshtein(a, b):

      """Compute Levenshtein edit distance between two strings."""

-     mat = [[0 for _ in xrange(len(a) + 1)] for _ in xrange(len(b) + 1)]

+     mat = [[0 for _ in range(len(a) + 1)] for _ in range(len(b) + 1)]

  

-     for i in xrange(len(a) + 1):

+     for i in range(len(a) + 1):

          mat[0][i] = i

  

-     for j in xrange(len(b) + 1):

+     for j in range(len(b) + 1):

          mat[j][0] = j

  

-     for j in xrange(1, len(b) + 1):

-         for i in xrange(1, len(a) + 1):

+     for j in range(1, len(b) + 1):

+         for i in range(1, len(a) + 1):

              cost = 0 if a[i - 1] == b[j - 1] else 1

              mat[j][i] = min(mat[j - 1][i] + 1,

                              mat[j][i - 1] + 1,
@@ -616,8 +615,9 @@ 

      If both path and logger are specified, more debugging information will be

      printed in case of failure.

      """

-     for i in xrange(max_retries):

-         proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

+     for i in range(max_retries):

+         proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,

+                                 universal_newlines=True)

          out, err = proc.communicate()

          if proc.returncode == 0:

              # We were successful
@@ -634,7 +634,8 @@ 

          ]

          for c in commands:

              try:

-                 proc = subprocess.Popen(c, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

+                 proc = subprocess.Popen(c, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,

+                                         universal_newlines=True)

                  out, _ = proc.communicate()

                  logger.debug('`%s` exited with %s and following output:\n%s',

                               ' '.join(c), proc.returncode, out)
@@ -801,7 +802,7 @@ 

  

  @retry(wait_on=RuntimeError)

  def git_ls_remote(baseurl, ref):

-     return run(['git', 'ls-remote', baseurl, ref])

+     return run(['git', 'ls-remote', baseurl, ref], universal_newlines=True)

  

  

  def get_tz_offset():

file modified
+1 -1
@@ -225,7 +225,7 @@ 

          node.appendChild(doc.createTextNode(content))

      if lang:

          node.setAttribute("xml:lang", lang)

-     for attr, value in kwargs.iteritems():

+     for attr, value in kwargs.items():

          node.setAttribute(attr, value)

      parent.appendChild(node)

      return node

file modified
+8 -6
@@ -16,9 +16,10 @@ 

  

  import os

  import sys

- import pipes

  from fnmatch import fnmatch

  import contextlib

+ from functools import cmp_to_key

+ from six.moves import shlex_quote

  

  from kobo.shortcuts import force_list, relative_path, run

  from pungi import util
@@ -197,7 +198,7 @@ 

  

  def get_checkisomd5_data(iso_path, logger=None):

      cmd = get_checkisomd5_cmd(iso_path, just_print=True)

-     retcode, output = run(cmd)

+     retcode, output = run(cmd, universal_newlines=True)

      items = [line.strip().rsplit(":", 1) for line in output.splitlines()]

      items = dict([(k, v.strip()) for k, v in items])

      md5 = items.get(iso_path, '')
@@ -229,12 +230,13 @@ 

  

  

  def get_manifest_cmd(iso_name):

-     return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (pipes.quote(iso_name), pipes.quote(iso_name))

+     return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (

+         shlex_quote(iso_name), shlex_quote(iso_name))

  

  

  def get_volume_id(path):

      cmd = ["isoinfo", "-d", "-i", path]

-     retcode, output = run(cmd)

+     retcode, output = run(cmd, universal_newlines=True)

  

      for line in output.splitlines():

          line = line.strip()
@@ -340,7 +342,7 @@ 

          seen_dirs.add(dn)

  

      f = open(file_name, "w")

-     for i in sorted(result, cmp=cmp_graft_points):

+     for i in sorted(result, key=cmp_to_key(cmp_graft_points)):

          # make sure all files required for boot come first,

          # otherwise there may be problems with booting (large LBA address, etc.)

          found = False
@@ -413,7 +415,7 @@ 

      with util.temp_dir(prefix='iso-mount-') as mount_dir:

          env = {'LIBGUESTFS_BACKEND': 'direct', 'LIBGUESTFS_DEBUG': '1', 'LIBGUESTFS_TRACE': '1'}

          cmd = ["guestmount", "-a", image, "-m", "/dev/sda", mount_dir]

-         ret, out = run(cmd, env=env, can_fail=True)

+         ret, out = run(cmd, env=env, can_fail=True, universal_newlines=True)

          if ret != 0:

              # The mount command failed, something is wrong. Log the output and raise an exception.

              if logger:

file modified
+16 -10
@@ -15,7 +15,6 @@ 

  

  

  import os

- import pipes

  import re

  import time

  import threading
@@ -23,7 +22,8 @@ 

  

  import koji

  from kobo.shortcuts import run

- from ConfigParser import ConfigParser

+ import six

+ from six.moves import configparser, shlex_quote

  

  from .. import util

  from ..arch_utils import getBaseArch
@@ -105,7 +105,7 @@ 

          cmd.append(arch)

  

          if isinstance(command, list):

-             command = " ".join([pipes.quote(i) for i in command])

+             command = " ".join([shlex_quote(i) for i in command])

  

          # HACK: remove rpmdb and yum cache

          command = "rm -f /var/lib/rpm/__db*; rm -rf /var/cache/yum/*; set -x; " + command
@@ -137,7 +137,8 @@ 

          """

          task_id = None

          with self.get_koji_cmd_env() as env:

-             retcode, output = run(command, can_fail=True, logfile=log_file, show_cmd=True, env=env)

+             retcode, output = run(command, can_fail=True, logfile=log_file,

+                                   show_cmd=True, env=env, universal_newlines=True)

          if "--task-id" in command:

              first_line = output.splitlines()[0]

              if re.match(r'^\d+$', first_line):
@@ -166,10 +167,13 @@ 

          # The minimum set of options

          min_options = ("name", "version", "target", "install_tree", "arches", "format", "kickstart", "ksurl", "distro")

          assert set(min_options).issubset(set(config_options['image-build'].keys())), "image-build requires at least %s got '%s'" % (", ".join(min_options), config_options)

-         cfg_parser = ConfigParser()

-         for section, opts in config_options.iteritems():

+         cfg_parser = configparser.ConfigParser()

+         for section, opts in config_options.items():

              cfg_parser.add_section(section)

-             for option, value in opts.iteritems():

+             for option, value in opts.items():

+                 if not isinstance(value, six.string_types):

+                     # Python 3 configparser will reject non-string values.

+                     value = str(value)

                  cfg_parser.set(section, option, value)

  

          fd = open(conf_file_dest, "w")
@@ -300,7 +304,7 @@ 

          attempt = 0

  

          while True:

-             retcode, output = run(cmd, can_fail=True, logfile=logfile)

+             retcode, output = run(cmd, can_fail=True, logfile=logfile, universal_newlines=True)

  

              if retcode == 0 or not self._has_connection_error(output):

                  # Task finished for reason other than connection error.
@@ -320,7 +324,8 @@ 

          command finishes.

          """

          with self.get_koji_cmd_env() as env:

-             retcode, output = run(command, can_fail=True, logfile=log_file, env=env)

+             retcode, output = run(command, can_fail=True, logfile=log_file,

+                                   env=env, universal_newlines=True)

  

          match = re.search(r"Created task: (\d+)", output)

          if not match:
@@ -508,7 +513,8 @@ 

              result.append(fmt % rpm_info)

      else:

          # local

-         retcode, output = run("rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'")

+         retcode, output = run("rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",

+                               universal_newlines=True)

          for i in output.splitlines():

              if not i:

                  continue

file modified
+2 -2
@@ -50,7 +50,7 @@ 

          kickstart = open(ks_path, "w")

  

          # repos

-         for repo_name, repo_url in repos.items() + lookaside_repos.items():

+         for repo_name, repo_url in list(repos.items()) + list(lookaside_repos.items()):

              if "://" not in repo_url:

                  repo_url = "file://" + os.path.abspath(repo_url)

              repo_str = "repo --name=%s --baseurl=%s" % (repo_name, repo_url)
@@ -214,7 +214,7 @@ 

          missing_comps = set()

  

          for line in f:

-             for file_type, pattern in PACKAGES_RE.iteritems():

+             for file_type, pattern in PACKAGES_RE.items():

                  match = pattern.match(line)

                  if match:

                      item = {}

@@ -36,7 +36,7 @@ 

          cmd.append("--arch=%s" % i)

  

      repos = repos or {}

-     for repo_id, repo_path in repos.iteritems():

+     for repo_id, repo_path in repos.items():

          cmd.append("--repofrompath=%s,%s" % (repo_id, _to_url(repo_path)))

          cmd.append(cmds[backend]['repoarg'] % repo_id)

          if backend == 'dnf':
@@ -46,7 +46,7 @@ 

              cmd.append('--check=%s' % repo_id)

  

      lookaside = lookaside or {}

-     for repo_id, repo_path in lookaside.iteritems():

+     for repo_id, repo_path in lookaside.items():

          cmd.append("--repofrompath=%s,%s" % (repo_id, _to_url(repo_path)))

          cmd.append(cmds[backend]['lookaside'] % repo_id)

  

file modified
+9 -8
@@ -17,9 +17,9 @@ 

  

  import os

  import shutil

- import pipes

  import glob

  import six

+ from six.moves import shlex_quote

  

  import kobo.log

  from kobo.shortcuts import run, force_list
@@ -44,7 +44,8 @@ 

      def run_process_command(self, cwd):

          if self.command:

              self.log_debug('Running "%s"' % self.command)

-             retcode, output = run(self.command, workdir=cwd, can_fail=True)

+             retcode, output = run(self.command, workdir=cwd, can_fail=True,

+                                   universal_newlines=True)

              if retcode != 0:

                  self.log_error('Output was: "%s"' % output)

                  raise RuntimeError('%r failed with exit code %s'
@@ -112,12 +113,12 @@ 

              self.log_debug("Exporting directory %s from git %s (branch %s)..."

                             % (scm_dir, scm_root, scm_branch))

              cmd = ("/usr/bin/git archive --remote=%s %s %s | tar xf -"

-                    % (pipes.quote(scm_root), pipes.quote(scm_branch), pipes.quote(scm_dir)))

+                    % (shlex_quote(scm_root), shlex_quote(scm_branch), shlex_quote(scm_dir)))

              # git archive is not supported by http/https

              # or by smart http https://git-scm.com/book/en/v2/Git-on-the-Server-Smart-HTTP

              if scm_root.startswith("http") or self.command:

                  cmd = ("/usr/bin/git clone --depth 1 --branch=%s %s %s"

-                        % (pipes.quote(scm_branch), pipes.quote(scm_root), pipes.quote(tmp_dir)))

+                        % (shlex_quote(scm_branch), shlex_quote(scm_root), shlex_quote(tmp_dir)))

              self.retry_run(cmd, workdir=tmp_dir, show_cmd=True)

              self.run_process_command(tmp_dir)

  
@@ -136,12 +137,12 @@ 

              self.log_debug("Exporting file %s from git %s (branch %s)..."

                             % (scm_file, scm_root, scm_branch))

              cmd = ("/usr/bin/git archive --remote=%s %s %s | tar xf -"

-                    % (pipes.quote(scm_root), pipes.quote(scm_branch), pipes.quote(scm_file)))

+                    % (shlex_quote(scm_root), shlex_quote(scm_branch), shlex_quote(scm_file)))

              # git archive is not supported by http/https

              # or by smart http https://git-scm.com/book/en/v2/Git-on-the-Server-Smart-HTTP

              if scm_root.startswith("http") or self.command:

                  cmd = ("/usr/bin/git clone --depth 1 --branch=%s %s %s"

-                        % (pipes.quote(scm_branch), pipes.quote(scm_root), pipes.quote(tmp_dir)))

+                        % (shlex_quote(scm_branch), shlex_quote(scm_root), shlex_quote(tmp_dir)))

              self.retry_run(cmd, workdir=tmp_dir, show_cmd=True)

              self.run_process_command(tmp_dir)

  
@@ -167,8 +168,8 @@ 

                  if scm_dir.endswith("/"):

                      copy_all(os.path.join(tmp_dir, scm_dir), target_dir)

                  else:

-                     run("cp -a %s %s/" % (pipes.quote(os.path.join(tmp_dir, scm_dir)),

-                                           pipes.quote(target_dir)))

+                     run("cp -a %s %s/" % (shlex_quote(os.path.join(tmp_dir, scm_dir)),

+                                           shlex_quote(target_dir)))

  

      def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):

          for rpm in self._list_rpms(scm_root):

file modified
+17 -24
@@ -15,15 +15,9 @@ 

  

  from __future__ import print_function

  import os

- import sys

  import copy

  import lxml.etree

- 

- 

- # HACK: define cmp in python3

- if sys.version_info[0] == 3:

-     def cmp(a, b):

-         return (a > b) - (a < b)

+ from functools import total_ordering

  

  

  def get_variants_dtd(logger=None):
@@ -203,6 +197,7 @@ 

          return result

  

  

+ @total_ordering

  class Variant(object):

      def __init__(self, id, name, type, arches, groups, environments=None,

                   buildinstallpackages=None, is_empty=False, parent=None,
@@ -216,10 +211,9 @@ 

          self.name = name

          self.type = type

          self.arches = sorted(copy.deepcopy(arches))

-         self.groups = sorted(copy.deepcopy(groups), lambda x, y: cmp(x["name"], y["name"]))

-         self.environments = sorted(copy.deepcopy(environments), lambda x, y: cmp(x["name"], y["name"]))

-         self.modules = sorted(copy.deepcopy(modules),

-                               lambda x, y: cmp(x["name"], y["name"]))

+         self.groups = sorted(copy.deepcopy(groups), key=lambda x: x["name"])

+         self.environments = sorted(copy.deepcopy(environments), key=lambda x: x["name"])

+         self.modules = sorted(copy.deepcopy(modules), key=lambda x: x["name"])

          self.buildinstallpackages = sorted(buildinstallpackages)

          self.variants = {}

          self.parent = parent
@@ -238,19 +232,18 @@ 

      def __repr__(self):

          return 'Variant(id="{0.id}", name="{0.name}", type="{0.type}", parent={0.parent})'.format(self)

  

-     def __cmp__(self, other):

-         # variant < addon, layered-product < optional

-         if self.type == other.type:

-             return cmp(self.uid, other.uid)

-         if self.type == "variant":

-             return -1

-         if other.type == "variant":

-             return 1

-         if self.type == "optional":

-             return 1

-         if other.type == "optional":

-             return -1

-         return cmp(self.uid, other.uid)

+     def __eq__(self, other):

+         return self.type == other.type and self.uid == other.uid

+ 

+     def __ne__(self, other):

+         return not (self == other)

+ 

+     def __lt__(self, other):

+         ORDERING = {'variant': 0, 'addon': 1, 'layered-product': 1, 'optional': 2}

+         return (ORDERING[self.type], self.uid) < (ORDERING[other.type], other.uid)

+ 

+     def __hash__(self):

+         return hash((self.type, self.uid))

  

      @property

      def uid(self):

file modified
+5 -7
@@ -16,10 +16,7 @@ 

  import os

  import productmd

  import tempfile

- try:

-     from shlex import quote

- except ImportError:

-     from pipes import quote

+ from six.moves import shlex_quote

  

  from pungi import util

  from pungi.phases.buildinstall import tweak_configs
@@ -27,8 +24,8 @@ 

  

  

  def sh(log, cmd, *args, **kwargs):

-     log.info('Running: %s', ' '.join(quote(x) for x in cmd))

-     ret, out = shortcuts.run(cmd, *args, **kwargs)

+     log.info('Running: %s', ' '.join(shlex_quote(x) for x in cmd))

+     ret, out = shortcuts.run(cmd, *args, universal_newlines=True, **kwargs)

      if out:

          log.debug('%s', out)

      return ret, out
@@ -36,7 +33,8 @@ 

  

  def get_lorax_dir(default='/usr/share/lorax'):

      try:

-         _, out = shortcuts.run(['python3', '-c' 'import pylorax; print(pylorax.find_templates())'])

+         _, out = shortcuts.run(['python3', '-c' 'import pylorax; print(pylorax.find_templates())'],

+                                universal_newlines=True)

          return out.strip()

      except Exception:

          return default

file modified
+3 -3
@@ -220,7 +220,7 @@ 

  

      def createrepo(self):

          # remove old repomd.xml checksums from treeinfo

-         for arch, ti in self.treeinfo.iteritems():

+         for arch, ti in self.treeinfo.items():

              print("Removing old repomd.xml checksums from treeinfo: {0}".format(arch))

              for i in ti.checksums.checksums.keys():

                  if "repomd.xml" in i:
@@ -257,7 +257,7 @@ 

                  ti.checksums.add(os.path.relpath(repomd_path, tree_dir), 'sha256', root_dir=tree_dir)

  

          # write treeinfo

-         for arch, ti in self.treeinfo.iteritems():

+         for arch, ti in self.treeinfo.items():

              print("Writing treeinfo: {0}".format(arch))

              ti_path = os.path.join(self.temp_dir, "trees", arch, ".treeinfo")

              makedirs(os.path.dirname(ti_path))
@@ -265,7 +265,7 @@ 

  

      def discinfo(self):

          # write discinfo and media repo

-         for arch, ti in self.treeinfo.iteritems():

+         for arch, ti in self.treeinfo.items():

              di_path = os.path.join(self.temp_dir, "trees", arch, ".discinfo")

              description = "%s %s" % (ti.release.name, ti.release.version)

              if ti.release.is_layered:

file modified
+6 -3
@@ -10,6 +10,7 @@ 

  import shutil

  import errno

  import imp

+ import six

  

  from pungi.util import get_arch_variant_data

  from pungi import paths, checks
@@ -42,7 +43,7 @@ 

          return self.uid

  

      def get_variants(self, arch=None, types=None):

-         return [v for v in self.variants.values()

+         return [v for v in list(self.variants.values())

                  if (not arch or arch in v.arches) and (not types or v.type in types)]

  

  
@@ -117,7 +118,7 @@ 

          self.variants['Server'].variants['HA'] = self.all_variants['Server-HA']

  

      def get_variants(self, arch=None, types=None):

-         return [v for v in self.all_variants.values()

+         return [v for v in list(self.all_variants.values())

                  if (not arch or arch in v.arches) and (not types or v.type in types)]

  

      def can_fail(self, variant, arch, deliverable):
@@ -126,7 +127,7 @@ 

  

      def get_arches(self):

          result = set()

-         for variant in self.variants.itervalues():

+         for variant in list(self.variants.values()):

              result |= set(variant.arches)

          return sorted(result)

  
@@ -142,6 +143,8 @@ 

          os.makedirs(os.path.dirname(path))

      except OSError:

          pass

+     if not isinstance(content, six.binary_type):

+         content = content.encode()

      with open(path, 'wb') as f:

          f.write(content)

      return path

file modified
+8 -4
@@ -1,9 +1,8 @@ 

  import mock

  import unittest

  import six

- import pungi

  

- from helpers import load_bin

+ from tests.helpers import load_bin

  

  cli = load_bin("pungi-koji")

  
@@ -12,8 +11,13 @@ 

  

      @mock.patch('sys.argv', new=['prog', '--version'])

      @mock.patch('sys.stderr', new_callable=six.StringIO)

+     @mock.patch('sys.stdout', new_callable=six.StringIO)

      @mock.patch('pungi_cli_fake_pungi-koji.get_full_version', return_value='a-b-c.111')

-     def test_version(self, get_full_version, stderr):

+     def test_version(self, get_full_version, stdout, stderr):

          with self.assertRaises(SystemExit):

              cli.main()

-         self.assertMultiLineEqual(stderr.getvalue(), 'a-b-c.111\n') 

\ No newline at end of file

+         # Python 2.7 prints the version to stderr, 3.4+ to stdout.

+         if six.PY3:

+             self.assertMultiLineEqual(stdout.getvalue(), 'a-b-c.111\n')

+         else:

+             self.assertMultiLineEqual(stderr.getvalue(), 'a-b-c.111\n')

@@ -840,5 +840,6 @@ 

                      f.read().strip(),

                      ':LABEL=new\\\\x20volid ks=hd:LABEL=new\\\\x20volid:/ks.cfg')

  

+ 

  if __name__ == "__main__":

      unittest.main()

file modified
+30 -35
@@ -1,4 +1,3 @@ 

- #!/usr/bin/env python2

  # -*- coding: utf-8 -*-

  

  import mock
@@ -8,7 +7,7 @@ 

      import unittest

  import os

  import sys

- import StringIO

+ from six import StringIO

  

  import kobo.conf

  
@@ -26,7 +25,7 @@ 

          def custom_exists(path):

              return False

  

-         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+         with mock.patch('sys.stdout', new_callable=StringIO) as out:

              with mock.patch('os.path.exists') as exists:

                  exists.side_effect = custom_exists

                  result = checks.check({})
@@ -35,7 +34,7 @@ 

          self.assertFalse(result)

  

      def test_all_deps_ok(self):

-         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+         with mock.patch('sys.stdout', new_callable=StringIO) as out:

              with mock.patch('platform.machine') as machine:

                  machine.return_value = 'x86_64'

                  with mock.patch('os.path.exists') as exists:
@@ -50,7 +49,7 @@ 

              'create_jigdo': False

          }

  

-         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+         with mock.patch('sys.stdout', new_callable=StringIO) as out:

              with mock.patch('platform.machine') as machine:

                  machine.return_value = 'x86_64'

                  with mock.patch('os.path.exists') as exists:
@@ -67,7 +66,7 @@ 

              'runroot': True,

          }

  

-         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+         with mock.patch('sys.stdout', new_callable=StringIO) as out:

              with mock.patch('os.path.exists') as exists:

                  exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])

                  result = checks.check(conf)
@@ -81,7 +80,7 @@ 

              'runroot': True,

          }

  

-         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+         with mock.patch('sys.stdout', new_callable=StringIO) as out:

              with mock.patch('os.path.exists') as exists:

                  exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])

                  result = checks.check(conf)
@@ -96,14 +95,14 @@ 

              'runroot': True,

          }

  

-         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+         with mock.patch('sys.stdout', new_callable=StringIO) as out:

              with mock.patch('platform.machine') as machine:

                  machine.return_value = 'armhfp'

                  with mock.patch('os.path.exists') as exists:

                      exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])

                      result = checks.check(conf)

  

-         self.assertRegexpMatches(out.getvalue(), r'^Not checking.*Expect failures.*$')

+         self.assertRegex(out.getvalue(), r'^Not checking.*Expect failures.*$')

          self.assertTrue(result)

  

      def test_isohybrid_not_needed_in_runroot(self):
@@ -111,7 +110,7 @@ 

              'runroot': True,

          }

  

-         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+         with mock.patch('sys.stdout', new_callable=StringIO) as out:

              with mock.patch('os.path.exists') as exists:

                  exists.side_effect = self.dont_find(['/usr/bin/isohybrid'])

                  result = checks.check(conf)
@@ -124,7 +123,7 @@ 

              'runroot': True,

          }

  

-         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+         with mock.patch('sys.stdout', new_callable=StringIO) as out:

              with mock.patch('os.path.exists') as exists:

                  exists.side_effect = self.dont_find(['/usr/bin/genisoimage'])

                  result = checks.check(conf)
@@ -139,7 +138,7 @@ 

              'bootable': True,

          }

  

-         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+         with mock.patch('sys.stdout', new_callable=StringIO) as out:

              with mock.patch('os.path.exists') as exists:

                  exists.side_effect = self.dont_find(['/usr/bin/genisoimage'])

                  result = checks.check(conf)
@@ -148,7 +147,7 @@ 

          self.assertFalse(result)

  

      def test_requires_modifyrepo(self):

-         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+         with mock.patch('sys.stdout', new_callable=StringIO) as out:

              with mock.patch('os.path.exists') as exists:

                  exists.side_effect = self.dont_find(['/usr/bin/modifyrepo'])

                  result = checks.check({})
@@ -157,7 +156,7 @@ 

          self.assertFalse(result)

  

      def test_requires_createrepo_c(self):

-         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+         with mock.patch('sys.stdout', new_callable=StringIO) as out:

              with mock.patch('os.path.exists') as exists:

                  exists.side_effect = self.dont_find(['/usr/bin/createrepo_c'])

                  result = checks.check({})
@@ -170,7 +169,7 @@ 

              'createrepo_c': False,

          }

  

-         with mock.patch('sys.stdout', new_callable=StringIO.StringIO) as out:

+         with mock.patch('sys.stdout', new_callable=StringIO) as out:

              with mock.patch('os.path.exists') as exists:

                  exists.side_effect = self.dont_find(['/usr/bin/createrepo_c'])

                  result = checks.check(conf)
@@ -228,7 +227,7 @@ 

          errors, warnings = checks.validate(config)

          self.assertEqual(len(errors), 0)

          self.assertEqual(len(warnings), 1)

-         self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")

+         self.assertRegex(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")

          self.assertEqual(config.get("release_name", None), "dummy product")

  

      @mock.patch('pungi.checks.make_schema')
@@ -276,7 +275,7 @@ 

          errors, warnings = checks.validate(config)

          self.assertEqual(len(errors), 0)

          self.assertEqual(len(warnings), 1)

-         self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")

+         self.assertRegex(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")

          self.assertEqual(config.get("release_name", None), "dummy product")

  

      @mock.patch('pungi.checks.make_schema')
@@ -300,9 +299,9 @@ 

          config = self._load_conf_from_string(string)

          errors, warnings = checks.validate(config)

          self.assertEqual(len(errors), 1)

-         self.assertRegexpMatches(errors[0], r"^ERROR: Config option 'product_name' is an alias of 'release_name', only one can be used.*")

+         self.assertRegex(errors[0], r"^ERROR: Config option 'product_name' is an alias of 'release_name', only one can be used.*")

          self.assertEqual(len(warnings), 1)

-         self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")

+         self.assertRegex(warnings[0], r"^WARNING: Config option 'product_name' is deprecated and now an alias to 'release_name'.*")

          self.assertEqual(config.get("release_name", None), "dummy product")

  

      @mock.patch('pungi.checks.make_schema')
@@ -341,8 +340,8 @@ 

          errors, warnings = checks.validate(config)

          self.assertEqual(len(errors), 0)

          self.assertEqual(len(warnings), 2)

-         self.assertRegexpMatches(warnings[0], r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*")

-         self.assertRegexpMatches(warnings[1], r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*")

+         self.assertRegex(warnings[0], r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*")

+         self.assertRegex(warnings[1], r"^WARNING: Config option '.+' is deprecated and now an alias to '.+'.*")

          self.assertEqual(config.get("release_name", None), "dummy product")

          self.assertEqual(config.get("foophase", {}).get("repo", None), "http://www.exampe.com/os")

  
@@ -381,8 +380,8 @@ 

          errors, warnings = checks.validate(config)

          self.assertEqual(len(errors), 0)

          self.assertEqual(len(warnings), 2)

-         self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")

-         self.assertRegexpMatches(warnings[1], r"^WARNING: Value from config option 'repo_from' is now appended to option 'repo'")

+         self.assertRegex(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")

+         self.assertRegex(warnings[1], r"^WARNING: Value from config option 'repo_from' is now appended to option 'repo'")

          self.assertEqual(config.get("release_name", None), "dummy product")

          self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"])

  
@@ -420,8 +419,8 @@ 

          errors, warnings = checks.validate(config)

          self.assertEqual(len(errors), 0)

          self.assertEqual(len(warnings), 2)

-         self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")

-         self.assertRegexpMatches(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,")

+         self.assertRegex(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")

+         self.assertRegex(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,")

          self.assertEqual(config.get("release_name", None), "dummy product")

          self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server"])

  
@@ -463,10 +462,10 @@ 

          errors, warnings = checks.validate(config)

          self.assertEqual(len(errors), 0)

          self.assertEqual(len(warnings), 4)

-         self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")

-         self.assertRegexpMatches(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,")

-         self.assertRegexpMatches(warnings[2], r"^WARNING: Config option 'source_repo_from' is deprecated, its value will be appended to option 'repo'")

-         self.assertRegexpMatches(warnings[3], r"^WARNING: Value from config option 'source_repo_from' is now appended to option 'repo'.")

+         self.assertRegex(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")

+         self.assertRegex(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified,")

+         self.assertRegex(warnings[2], r"^WARNING: Config option 'source_repo_from' is deprecated, its value will be appended to option 'repo'")

+         self.assertRegex(warnings[3], r"^WARNING: Value from config option 'source_repo_from' is now appended to option 'repo'.")

          self.assertEqual(config.get("release_name", None), "dummy product")

          self.assertEqual(config.get("repo", None), ["http://url/to/repo", "Server", "Client"])

  
@@ -516,8 +515,8 @@ 

          errors, warnings = checks.validate(config)

          self.assertEqual(len(errors), 0)

          self.assertEqual(len(warnings), 2)

-         self.assertRegexpMatches(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")

-         self.assertRegexpMatches(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified, value from 'repo_from' is now added as 'repo'.*")

+         self.assertRegex(warnings[0], r"^WARNING: Config option 'repo_from' is deprecated, its value will be appended to option 'repo'.*")

+         self.assertRegex(warnings[1], r"^WARNING: Config option 'repo' is not found, but 'repo_from' is specified, value from 'repo_from' is now added as 'repo'.*")

          self.assertEqual(config.get("live_images")[0][1]['armhfp']['repo'], 'Everything')

  

  
@@ -550,7 +549,3 @@ 

              [mock.call.warning('Unusually strict umask detected (0%03o), '

                                 'expect files with broken permissions.', 0o044)]

          )

- 

- 

- if __name__ == "__main__":

-     unittest.main()

file modified
+6 -6
@@ -234,9 +234,9 @@ 

          compose = Compose(conf, self.tmp_dir)

          compose.read_variants()

  

-         self.assertEqual(sorted([v.uid for v in compose.variants.itervalues()]),

+         self.assertEqual(sorted(v.uid for v in compose.variants.values()),

                           ['Client', 'Crashy', 'Live', 'Server'])

-         self.assertEqual(sorted([v.uid for v in compose.variants['Server'].variants.itervalues()]),

+         self.assertEqual(sorted(v.uid for v in compose.variants['Server'].variants.values()),

                           ['Server-Gluster', 'Server-ResilientStorage', 'Server-optional'])

          self.assertItemsEqual(compose.variants['Client'].arches,

                                ['i386', 'x86_64'])
@@ -278,9 +278,9 @@ 

          compose = Compose(conf, self.tmp_dir)

          compose.read_variants()

  

-         self.assertEqual(sorted([v.uid for v in compose.variants.itervalues()]),

+         self.assertEqual(sorted(v.uid for v in compose.variants.values()),

                           ['Client', 'Live', 'Server'])

-         self.assertEqual(sorted([v.uid for v in compose.variants['Server'].variants.itervalues()]),

+         self.assertEqual(sorted(v.uid for v in compose.variants['Server'].variants.values()),

                           ['Server-Gluster', 'Server-ResilientStorage', 'Server-optional'])

          self.assertItemsEqual(compose.variants['Client'].arches,

                                ['x86_64'])
@@ -324,7 +324,7 @@ 

          compose = Compose(conf, self.tmp_dir)

          compose.read_variants()

  

-         self.assertEqual(sorted([v.uid for v in compose.variants.itervalues()]),

+         self.assertEqual(sorted(v.uid for v in compose.variants.values()),

                           ['Client', 'Server'])

          self.assertItemsEqual(compose.variants['Client'].arches,

                                ['i386', 'x86_64'])
@@ -364,7 +364,7 @@ 

          compose = Compose(conf, self.tmp_dir, logger=logger)

          compose.read_variants()

  

-         self.assertEqual(sorted([v.uid for v in compose.variants.itervalues()]),

+         self.assertEqual(sorted(v.uid for v in compose.variants.values()),

                           ['Client', 'Server'])

          self.assertItemsEqual(compose.variants['Client'].arches,

                                ['x86_64'])

file modified
+4 -4
@@ -27,10 +27,10 @@ 

          self.file = tempfile.NamedTemporaryFile(prefix='comps-wrapper-test-')

  

      def assertFilesEqual(self, fn1, fn2):

-         with open(fn1) as f1:

-             lines1 = f1.read().splitlines()

-         with open(fn2) as f2:

-             lines2 = f2.read().splitlines()

+         with open(fn1, 'rb') as f1:

+             lines1 = f1.read().decode('utf-8').splitlines()

+         with open(fn2, 'rb') as f2:

+             lines2 = f2.read().decode('utf-8').splitlines()

          diff = '\n'.join(difflib.unified_diff(lines1, lines2,

                                                fromfile='EXPECTED', tofile='ACTUAL'))

          self.assertEqual(diff, '', 'Files differ:\n' + diff)

file modified
+38 -5
@@ -8,7 +8,9 @@ 

      import unittest

  

  import os

+ import six

  import sys

+ import mock

  

  sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

  
@@ -249,6 +251,38 @@ 

              cfg,

              [checks.REQUIRES.format('gather_source', 'json', 'gather_source_mapping')])

  

+     def test_dnf_backend_is_default_on_py3(self):

+         cfg = load_config(

+             pkgset_source='koji',

+             pkgset_koji_tag='f27',

+         )

+ 

+         with mock.patch('six.PY2', new=False):

+             self.assertValidation(cfg, [])

+         self.assertEqual(cfg['gather_backend'], 'dnf')

+ 

+     def test_yum_backend_is_default_on_py2(self):

+         cfg = load_config(

+             pkgset_source='koji',

+             pkgset_koji_tag='f27',

+         )

+ 

+         with mock.patch('six.PY2', new=True):

+             self.assertValidation(cfg, [])

+         self.assertEqual(cfg['gather_backend'], 'yum')

+ 

+     def test_yum_backend_is_rejected_on_py3(self):

+         cfg = load_config(

+             pkgset_source='koji',

+             pkgset_koji_tag='f27',

+             gather_backend='yum',

+         )

+ 

+         with mock.patch('six.PY2', new=False):

+             self.assertValidation(

+                 cfg,

+                 ["Failed validation in gather_backend: 'yum' is not one of ['dnf']"])

+ 

  

  class OSBSConfigTestCase(ConfigTestCase):

      def test_validate(self):
@@ -391,11 +425,10 @@ 

          cfg = load_config(PKGSET_REPOS,

                            multilib=[('^*$', {'*': []})])

  

-         self.assertValidation(

-             cfg,

-             ['Failed validation in multilib.0.0: incorrect regular '

-              'expression: nothing to repeat'],

-             [])

+         msg = 'Failed validation in multilib.0.0: incorrect regular expression: nothing to repeat'

+         if six.PY3:

+             msg += ' at position 1'

+         self.assertValidation(cfg, [msg], [])

  

  

  class RepoclosureTestCase(ConfigTestCase):

@@ -1,8 +1,6 @@ 

- #!/usr/bin/env python

  # -*- coding: utf-8 -*-

  

  

- import unittest

  import mock

  import os

  import subprocess
@@ -27,9 +25,6 @@ 

                               stdout=subprocess.PIPE,

                               stderr=subprocess.PIPE)

          (stdout, stderr) = p.communicate()

+         self.assertEqual(b'', stdout)

+         self.assertEqual(b'', stderr)

          self.assertEqual(0, p.returncode)

-         self.assertEqual('', stdout)

-         self.assertEqual('', stderr)

- 

- if __name__ == '__main__':

-     unittest.main()

@@ -31,6 +31,7 @@ 

          pool = ThreadPool.return_value

  

          phase = createiso.CreateisoPhase(compose)

+         phase.logger = mock.Mock()

          phase.run()

  

          self.assertEqual(len(pool.add.call_args_list), 0)
@@ -88,6 +89,7 @@ 

          pool = ThreadPool.return_value

  

          phase = createiso.CreateisoPhase(compose)

+         phase.logger = mock.Mock()

          phase.run()

  

          self.assertEqual(prepare_iso.call_args_list,
@@ -151,6 +153,7 @@ 

          pool = ThreadPool.return_value

  

          phase = createiso.CreateisoPhase(compose)

+         phase.logger = mock.Mock()

          phase.run()

  

          self.assertItemsEqual(
@@ -633,7 +636,7 @@ 

          self.sizes = sizes

  

      def __call__(self, path):

-         for fragment, size in self.sizes.iteritems():

+         for fragment, size in self.sizes.items():

              if fragment in path:

                  return size

          return 0

@@ -6,7 +6,7 @@ 

  

  import os

  import sys

- import StringIO

+ from six.moves import StringIO

  

  sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

  
@@ -19,7 +19,7 @@ 

      def setUp(self):

          super(CreateIsoScriptTest, self).setUp()

          self.outdir = os.path.join(self.topdir, 'isos')

-         self.out = StringIO.StringIO()

+         self.out = StringIO()

          self.maxDiff = None

  

      def assertScript(self, cmds):

@@ -715,6 +715,7 @@ 

      def __repr__(self):

          return u'ANY'

  

+ 

  ANY = ANYSingleton()

  

  
@@ -728,7 +729,7 @@ 

      def assertProductIds(self, mapping):

          pids = glob.glob(self.compose.paths.work.product_id('*', '*'))

          expected = set()

-         for variant, arches in mapping.iteritems():

+         for variant, arches in mapping.items():

              for arch in arches:

                  expected.add(os.path.join(self.topdir, 'work', arch,

                                            'product_id',
@@ -800,8 +801,8 @@ 

              get_productids_from_scm(self.compose)

  

          self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, ANY)])

-         self.assertEqual(str(ctx.exception),

-                          'No product certificate found (arch: amd64, variant: Everything)')

+         self.assertRegex(str(ctx.exception),

+                          r'No product certificate found \(arch: amd64, variant: (Everything|Client)\)')

  

      @mock.patch('pungi.phases.createrepo.get_dir_from_scm')

      def test_multiple_matching(self, get_dir_from_scm):
@@ -822,8 +823,8 @@ 

              get_productids_from_scm(self.compose)

  

          self.assertEqual(get_dir_from_scm.call_args_list, [mock.call(cfg, ANY)])

-         self.assertRegexpMatches(str(ctx.exception),

-                                  'Multiple product certificates found.+')

+         self.assertRegex(str(ctx.exception),

+                          'Multiple product certificates found.+')

  

  

  if __name__ == "__main__":

@@ -159,7 +159,7 @@ 

              extra_files.copy_extra_files(

                  compose, [cfg], 'x86_64', compose.variants['Server'], package_sets)

  

-         self.assertRegexpMatches(str(ctx.exception), 'No.*package.*matching bad-server\*.*')

+         self.assertRegex(str(ctx.exception), r'No.*package.*matching bad-server\*.*')

  

          self.assertEqual(len(get_file_from_scm.call_args_list), 0)

          self.assertEqual(get_dir_from_scm.call_args_list, [])

file modified
+7 -3
@@ -9,6 +9,7 @@ 

  import os

  import tempfile

  import shutil

+ import six

  import sys

  import logging

  
@@ -27,6 +28,11 @@ 

  except ImportError:

      HAS_DNF = False

  

+ if six.PY2:

+     HAS_YUM = True

+ else:

+     HAS_YUM = False

+ 

  

  def convert_pkg_map(data):

      """
@@ -1627,6 +1633,7 @@ 

          ])

  

  

+ @unittest.skipUnless(HAS_YUM, 'YUM only available on Python 2')

  class PungiYumDepsolvingTestCase(DepsolvingBase, unittest.TestCase):

  

      def setUp(self):
@@ -1789,6 +1796,3 @@ 

          self.assertFlags("dummy-krb5-devel-1.10-5.x86_64", [PkgFlag.lookaside])

          self.assertFlags("dummy-krb5-1.10-5.src", [PkgFlag.lookaside])

          self.assertFlags("dummy-krb5-debuginfo-1.10-5.x86_64", [PkgFlag.lookaside])

- 

- if __name__ == "__main__":

-     unittest.main()

@@ -34,6 +34,9 @@ 

      def __repr__(self):

          return self.nvr

  

+     def __lt__(self, another):

+         return self.nvr < another.nvr

+ 

  

  def _join(a, *rest):

      res = copy.deepcopy(a)

file modified
+2 -2
@@ -83,8 +83,8 @@ 

          phase = init.InitPhase(compose)

          phase.run()

  

-         self.assertEqual(write_global.mock_calls, [])

-         self.assertEqual(write_prepopulate.mock_calls, [mock.call(compose)])

+         self.assertItemsEqual(write_global.mock_calls, [])

+         self.assertItemsEqual(write_prepopulate.mock_calls, [mock.call(compose)])

          self.assertItemsEqual(write_arch.mock_calls, [])

          self.assertItemsEqual(create_comps.mock_calls, [])

          self.assertItemsEqual(write_variant.mock_calls, [])

file modified
+4 -2
@@ -26,7 +26,8 @@ 

          self.assertEqual(iso.get_implanted_md5('dummy.iso', logger=logger),

                           '31ff3e405e26ad01c63b62f6b11d30f6')

          self.assertEqual(mock_run.call_args_list,

-                          [mock.call(['/usr/bin/checkisomd5', '--md5sumonly', 'dummy.iso'])])

+                          [mock.call(['/usr/bin/checkisomd5', '--md5sumonly', 'dummy.iso'],

+                                     universal_newlines=True)])

          self.assertEqual(logger.mock_calls, [])

  

      @mock.patch('pungi.wrappers.iso.run')
@@ -35,7 +36,8 @@ 

          logger = mock.Mock()

          self.assertIsNone(iso.get_implanted_md5('dummy.iso', logger=logger))

          self.assertEqual(mock_run.call_args_list,

-                          [mock.call(['/usr/bin/checkisomd5', '--md5sumonly', 'dummy.iso'])])

+                          [mock.call(['/usr/bin/checkisomd5', '--md5sumonly', 'dummy.iso'],

+                                     universal_newlines=True)])

          self.assertGreater(len(logger.mock_calls), 0)

  

      @mock.patch('pungi.util.run_unmount_cmd')

file modified
+38 -23
@@ -18,7 +18,7 @@ 

  

  class DumbMock(object):

      def __init__(self, **kwargs):

-         for key, value in kwargs.iteritems():

+         for key, value in kwargs.items():

              setattr(self, key, value)

  

  
@@ -83,6 +83,7 @@ 

                      'ksurl': 'git://example.com/ks.git',

                      'distro': 'test-distro',

                      'release': '20160222.0',

+                     'disk_size': 4,

                  }

              },

              self.tmpfile
@@ -105,7 +106,8 @@ 

                                 'kickstart = test-kickstart',

                                 'ksurl = git://example.com/ks.git',

                                 'distro = test-distro',

-                                'release = 20160222.0'])

+                                'release = 20160222.0',

+                                'disk_size = 4'])

  

      def test_get_image_paths(self):

  
@@ -391,7 +393,8 @@ 

          self.assertDictEqual(result, {'retcode': 0, 'output': output, 'task_id': None})

          self.assertEqual(

              run.call_args_list,

-             [mock.call(cmd, can_fail=True, env=None, logfile=None, show_cmd=True)]

+             [mock.call(cmd, can_fail=True, env=None, logfile=None, show_cmd=True,

+                        universal_newlines=True)]

          )

  

      @mock.patch('pungi.wrappers.kojiwrapper.run')
@@ -404,7 +407,8 @@ 

          self.assertDictEqual(result, {'retcode': 0, 'output': output, 'task_id': 1234})

          self.assertEqual(

              run.call_args_list,

-             [mock.call(cmd, can_fail=True, env=None, logfile=None, show_cmd=True)]

+             [mock.call(cmd, can_fail=True, env=None, logfile=None, show_cmd=True,

+                        universal_newlines=True)]

          )

  

      @mock.patch('pungi.wrappers.kojiwrapper.run')
@@ -417,7 +421,8 @@ 

          self.assertDictEqual(result, {'retcode': 1, 'output': output, 'task_id': None})

          self.assertEqual(

              run.call_args_list,

-             [mock.call(cmd, can_fail=True, env=None, logfile=None, show_cmd=True)]

+             [mock.call(cmd, can_fail=True, env=None, logfile=None, show_cmd=True,

+                        universal_newlines=True)]

          )

  

      @mock.patch('pungi.wrappers.kojiwrapper.run')
@@ -430,7 +435,8 @@ 

          self.assertDictEqual(result, {'retcode': 1, 'output': output, 'task_id': 12345})

          self.assertEqual(

              run.call_args_list,

-             [mock.call(cmd, can_fail=True, env=None, logfile=None, show_cmd=True)]

+             [mock.call(cmd, can_fail=True, env=None, logfile=None, show_cmd=True,

+                        universal_newlines=True)]

          )

  

      @mock.patch.dict('os.environ', {'FOO': 'BAR'}, clear=True)
@@ -450,7 +456,7 @@ 

          self.assertEqual(

              run.call_args_list,

              [mock.call(cmd, can_fail=True, env={'KRB5CCNAME': 'DIR:/tmp/foo', 'FOO': 'BAR'},

-                        logfile=None, show_cmd=True)]

+                        logfile=None, show_cmd=True, universal_newlines=True)]

          )

  

  
@@ -464,7 +470,8 @@ 

  

          self.assertDictEqual(result, {'retcode': 0, 'output': output, 'task_id': 1234})

          self.assertItemsEqual(run.mock_calls,

-                               [mock.call('cmd', can_fail=True, logfile=None, env=None)])

+                               [mock.call('cmd', can_fail=True, logfile=None, env=None,

+                                          universal_newlines=True)])

  

      @mock.patch.dict('os.environ', {'FOO': 'BAR'}, clear=True)

      @mock.patch('pungi.util.temp_dir')
@@ -480,7 +487,8 @@ 

          self.assertDictEqual(result, {'retcode': 0, 'output': output, 'task_id': 1234})

          self.assertItemsEqual(run.mock_calls,

                                [mock.call('cmd', can_fail=True, logfile=None,

-                                          env={'KRB5CCNAME': 'DIR:/tmp/foo', 'FOO': 'BAR'})])

+                                          env={'KRB5CCNAME': 'DIR:/tmp/foo', 'FOO': 'BAR'},

+                                          universal_newlines=True)])

  

      @mock.patch('pungi.wrappers.kojiwrapper.run')

      def test_with_log(self, run):
@@ -491,7 +499,8 @@ 

  

          self.assertDictEqual(result, {'retcode': 0, 'output': output, 'task_id': 1234})

          self.assertItemsEqual(run.mock_calls,

-                               [mock.call('cmd', can_fail=True, logfile='logfile', env=None)])

+                               [mock.call('cmd', can_fail=True, logfile='logfile', env=None,

+                                          universal_newlines=True)])

  

      @mock.patch('pungi.wrappers.kojiwrapper.run')

      def test_fail_with_task_id(self, run):
@@ -502,7 +511,8 @@ 

  

          self.assertDictEqual(result, {'retcode': 1, 'output': output, 'task_id': 1234})

          self.assertItemsEqual(run.mock_calls,

-                               [mock.call('cmd', can_fail=True, logfile=None, env=None)])

+                               [mock.call('cmd', can_fail=True, logfile=None, env=None,

+                                          universal_newlines=True)])

  

      @mock.patch('pungi.wrappers.kojiwrapper.run')

      def test_fail_without_task_id(self, run):
@@ -513,7 +523,8 @@ 

              self.koji.run_blocking_cmd('cmd')

  

          self.assertItemsEqual(run.mock_calls,

-                               [mock.call('cmd', can_fail=True, logfile=None, env=None)])

+                               [mock.call('cmd', can_fail=True, logfile=None, env=None,

+                                          universal_newlines=True)])

          self.assertIn('Could not find task ID', str(ctx.exception))

  

      @mock.patch('pungi.wrappers.kojiwrapper.run')
@@ -526,9 +537,10 @@ 

  

          self.assertDictEqual(result, {'retcode': 0, 'output': retry, 'task_id': 1234})

          self.assertEqual(run.mock_calls,

-                          [mock.call('cmd', can_fail=True, logfile=None, env=None),

+                          [mock.call('cmd', can_fail=True, logfile=None, env=None,

+                                     universal_newlines=True),

                            mock.call(['koji', '--profile=custom-koji', 'watch-task', '1234'],

-                                     can_fail=True, logfile=None)])

+                                     can_fail=True, logfile=None, universal_newlines=True)])

  

      @mock.patch('pungi.wrappers.kojiwrapper.run')

      def test_disconnect_and_retry_but_fail(self, run):
@@ -540,9 +552,10 @@ 

  

          self.assertDictEqual(result, {'retcode': 1, 'output': retry, 'task_id': 1234})

          self.assertEqual(run.mock_calls,

-                          [mock.call('cmd', can_fail=True, logfile=None, env=None),

+                          [mock.call('cmd', can_fail=True, logfile=None, env=None,

+                                     universal_newlines=True),

                            mock.call(['koji', '--profile=custom-koji', 'watch-task', '1234'],

-                                     can_fail=True, logfile=None)])

+                                     can_fail=True, logfile=None, universal_newlines=True)])

  

      @mock.patch('time.sleep')

      @mock.patch('pungi.wrappers.kojiwrapper.run')
@@ -555,13 +568,14 @@ 

  

          self.assertDictEqual(result, {'retcode': 0, 'output': retry, 'task_id': 1234})

          self.assertEqual(run.mock_calls,

-                          [mock.call('cmd', can_fail=True, logfile=None, env=None),

+                          [mock.call('cmd', can_fail=True, logfile=None, env=None,

+                                     universal_newlines=True),

                            mock.call(['koji', '--profile=custom-koji', 'watch-task', '1234'],

-                                     can_fail=True, logfile=None),

+                                     can_fail=True, logfile=None, universal_newlines=True),

                            mock.call(['koji', '--profile=custom-koji', 'watch-task', '1234'],

-                                     can_fail=True, logfile=None),

+                                     can_fail=True, logfile=None, universal_newlines=True),

                            mock.call(['koji', '--profile=custom-koji', 'watch-task', '1234'],

-                                     can_fail=True, logfile=None)])

+                                     can_fail=True, logfile=None, universal_newlines=True)])

          self.assertEqual(sleep.mock_calls,

                           [mock.call(i * 10) for i in range(1, 3)])

  
@@ -576,11 +590,12 @@ 

  

          self.assertIn('Failed to wait', str(ctx.exception))

          self.assertEqual(run.mock_calls,

-                          [mock.call('cmd', can_fail=True, logfile=None, env=None),

+                          [mock.call('cmd', can_fail=True, logfile=None, env=None,

+                                     universal_newlines=True),

                            mock.call(['koji', '--profile=custom-koji', 'watch-task', '1234'],

-                                     can_fail=True, logfile=None),

+                                     can_fail=True, logfile=None, universal_newlines=True),

                            mock.call(['koji', '--profile=custom-koji', 'watch-task', '1234'],

-                                     can_fail=True, logfile=None)])

+                                     can_fail=True, logfile=None, universal_newlines=True)])

          self.assertEqual(sleep.mock_calls, [mock.call(i * 10) for i in range(1, 2)])

  

  

file modified
+2 -2
@@ -328,7 +328,7 @@ 

  

          phase = LiveMediaPhase(compose)

  

-         with self.assertRaisesRegexp(RuntimeError, r'no.+Missing.+when building.+Server'):

+         with self.assertRaisesRegex(RuntimeError, r'no.+Missing.+when building.+Server'):

              phase.run()

  

      @mock.patch('pungi.phases.livemedia_phase.ThreadPool')
@@ -353,7 +353,7 @@ 

  

          phase = LiveMediaPhase(compose)

  

-         with self.assertRaisesRegexp(RuntimeError, r'There is no variant Missing to get repo from.'):

+         with self.assertRaisesRegex(RuntimeError, r'There is no variant Missing to get repo from.'):

              phase.run()

  

      @mock.patch('pungi.util.resolve_git_url')

@@ -1,4 +1,3 @@ 

- #!/usr/bin/env python2

  # -*- coding: utf-8 -*-

  

  try:
@@ -59,7 +58,3 @@ 

                                 '--add-arch-template-var=va1', '--add-arch-template-var=va2',

                                 '--logfile=/tmp/lorax.log',

                                 '/mnt/output_dir'])

- 

- 

- if __name__ == "__main__":

-     unittest.main()

file modified
+4 -1
@@ -6,7 +6,10 @@ 

  import mock

  import os

  import sys

- import unittest

+ try:

+     import unittest2 as unittest

+ except ImportError:

+     import unittest

  

  sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

  

file modified
+1 -1
@@ -443,7 +443,7 @@ 

          with self.assertRaises(RuntimeError) as ctx:

              self.t.process((self.compose, self.compose.variants['Server'], cfg), 1)

  

-         self.assertRegexpMatches(str(ctx.exception), r"task 12345 failed: see .+ for details")

+         self.assertRegex(str(ctx.exception), r"task 12345 failed: see .+ for details")

  

      @mock.patch('pungi.util.resolve_git_url')

      @mock.patch('pungi.phases.osbs.kojiwrapper.KojiWrapper')

@@ -2,7 +2,6 @@ 

  # -*- coding: utf-8 -*-

  

  

- import unittest

  import mock

  

  import os
@@ -360,6 +359,3 @@ 

                                            '--add-arch-template-var=ostree_repo=http://www.example.com/ostree',

                                            '--rootfs-size=None',

                                            self.output])])

- 

- if __name__ == '__main__':

-     unittest.main()

file modified
+1 -1
@@ -39,7 +39,7 @@ 

          log = mock.Mock()

          patch_iso.sh(log, ['ls'], foo='bar')

          self.assertEqual(mock_run.call_args_list,

-                          [mock.call(['ls'], foo='bar')])

+                          [mock.call(['ls'], foo='bar', universal_newlines=True)])

          self.assertEqual(log.info.call_args_list,

                           [mock.call('Running: %s', 'ls')])

          self.assertEqual(log.debug.call_args_list,

file modified
+6 -6
@@ -121,7 +121,7 @@ 

  

  class PkgsetCompareMixin(object):

      def assertPkgsetEqual(self, actual, expected):

-         for k, v1 in expected.iteritems():

+         for k, v1 in expected.items():

              self.assertIn(k, actual)

              v2 = actual.pop(k)

              self.assertItemsEqual(v1, v2)
@@ -148,7 +148,7 @@ 

              helpers.touch(os.path.join(self.topdir, filename))

  

      def assertPkgsetEqual(self, actual, expected):

-         for k, v1 in expected.iteritems():

+         for k, v1 in expected.items():

              self.assertIn(k, actual)

              v2 = actual.pop(k)

              self.assertItemsEqual(v1, v2)
@@ -259,8 +259,8 @@ 

              self.koji_wrapper.koji_proxy.mock_calls,

              [mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])

  

-         self.assertRegexpMatches(str(ctx.exception),

-                                  r'^RPM\(s\) not found for sigs: .+Check log for details.+')

+         self.assertRegex(str(ctx.exception),

+                          r'^RPM\(s\) not found for sigs: .+Check log for details.+')

  

      def test_can_not_find_any_package(self):

          pkgset = pkgsets.KojiPackageSet(self.koji_wrapper, ['cafebabe', None], arches=['x86_64'])
@@ -272,8 +272,8 @@ 

              self.koji_wrapper.koji_proxy.mock_calls,

              [mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])

  

-         self.assertRegexpMatches(str(ctx.exception),

-                                  r'^RPM\(s\) not found for sigs: .+Check log for details.+')

+         self.assertRegex(str(ctx.exception),

+                          r'^RPM\(s\) not found for sigs: .+Check log for details.+')

  

      def test_packages_attribute(self):

          self._touch_files([

@@ -94,11 +94,11 @@ 

          self.koji_wrapper = mock.Mock()

          self.pkgset_path = os.path.join(self.topdir, 'work', 'global', 'pkgset_global.pickle')

  

-     @mock.patch('cPickle.dumps')

+     @mock.patch('six.moves.cPickle.dumps')

      @mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet')

      def test_populate(self, KojiPackageSet, pickle_dumps):

  

-         pickle_dumps.return_value = 'DATA'

+         pickle_dumps.return_value = b'DATA'

  

          orig_pkgset = KojiPackageSet.return_value

  
@@ -117,7 +117,7 @@ 

          with open(self.pkgset_path) as f:

              self.assertEqual(f.read(), 'DATA')

  

-     @mock.patch('cPickle.dumps')

+     @mock.patch('six.moves.cPickle.dumps')

      @mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet')

      def test_populate_with_multiple_koji_tags(self, KojiPackageSet, pickle_dumps):

          self.compose = helpers.DummyCompose(self.topdir, {
@@ -126,7 +126,7 @@ 

          })

          self.compose.DEBUG = False

  

-         pickle_dumps.return_value = 'DATA'

+         pickle_dumps.return_value = b'DATA'

  

          orig_pkgset = KojiPackageSet.return_value

  
@@ -147,7 +147,7 @@ 

          with open(self.pkgset_path) as f:

              self.assertEqual(f.read(), 'DATA')

  

-     @mock.patch('cPickle.load')

+     @mock.patch('six.moves.cPickle.load')

      def test_populate_in_debug_mode(self, pickle_load):

          helpers.touch(self.pkgset_path, 'DATA')

          self.compose.DEBUG = True
@@ -167,7 +167,7 @@ 

              [mock.call.save_file_list(self.topdir + '/work/global/package_list/global.conf',

                                        remove_path_prefix='/prefix')])

  

-     @mock.patch('cPickle.dumps')

+     @mock.patch('six.moves.cPickle.dumps')

      @mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet.populate')

      @mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet.save_file_list')

      def test_populate_packages_to_gather(self, save_file_list, popuplate,
@@ -182,7 +182,7 @@ 

              ]

          })

          self.compose.DEBUG = False

-         pickle_dumps.return_value = 'DATA'

+         pickle_dumps.return_value = b'DATA'

  

          pkgset = source_koji.populate_global_pkgset(

              self.compose, self.koji_wrapper, '/prefix', 123456)

file modified
+2 -2
@@ -213,7 +213,7 @@ 

                                         self.destdir)

          self.assertStructure(retval, ['some_file.txt'])

          self.assertEqual(1, len(commands))

-         self.assertRegexpMatches(

+         self.assertRegex(

              commands[0],

              r'/usr/bin/git clone --depth 1 --branch=master https://example.com/git/repo.git /tmp/.+')

  
@@ -308,7 +308,7 @@ 

                                        self.destdir)

          self.assertStructure(retval, ['first', 'second'])

  

-         self.assertRegexpMatches(

+         self.assertRegex(

              commands[0],

              r'/usr/bin/git clone --depth 1 --branch=master https://example.com/git/repo.git /tmp/.+')

          self.assertEqual(commands[1:], ['make'])

file modified
+7 -7
@@ -17,12 +17,12 @@ 

  from tests.helpers import DummyCompose, PungiTestCase, touch, mk_boom

  

  

- PAD = '\0' * 100

- UNBOOTABLE_ISO = ('\0' * 0x8001) + 'CD001' + PAD

- ISO_WITH_MBR = ('\0' * 0x1fe) + '\x55\xAA' + ('\0' * 0x7e01) + 'CD001' + PAD

- ISO_WITH_GPT = ('\0' * 0x200) + 'EFI PART' + ('\0' * 0x7df9) + 'CD001' + PAD

- ISO_WITH_MBR_AND_GPT = ('\0' * 0x1fe) + '\x55\xAAEFI PART' + ('\0' * 0x7df9) + 'CD001' + PAD

- ISO_WITH_TORITO = ('\0' * 0x8001) + 'CD001' + ('\0' * 0x7fa) + '\0CD001\1EL TORITO SPECIFICATION' + PAD

+ PAD = b'\0' * 100

+ UNBOOTABLE_ISO = (b'\0' * 0x8001) + b'CD001' + PAD

+ ISO_WITH_MBR = (b'\0' * 0x1fe) + b'\x55\xAA' + (b'\0' * 0x7e01) + b'CD001' + PAD

+ ISO_WITH_GPT = (b'\0' * 0x200) + b'EFI PART' + (b'\0' * 0x7df9) + b'CD001' + PAD

+ ISO_WITH_MBR_AND_GPT = (b'\0' * 0x1fe) + b'\x55\xAAEFI PART' + (b'\0' * 0x7df9) + b'CD001' + PAD

+ ISO_WITH_TORITO = (b'\0' * 0x8001) + b'CD001' + (b'\0' * 0x7fa) + b'\0CD001\1EL TORITO SPECIFICATION' + PAD

  

  

  class TestCheckImageSanity(PungiTestCase):
@@ -187,7 +187,7 @@ 

          })

          test_phase.run_repoclosure(compose)

  

-         self.assertItemsEqual(mock_grc.call_args_list, [])

+         self.assertEqual(mock_grc.call_args_list, [])

  

      @mock.patch('pungi.wrappers.repoclosure.get_repoclosure_cmd')

      @mock.patch('pungi.phases.test.run')

file modified
+5 -5
@@ -5,7 +5,7 @@ 

  import os

  import shutil

  import sys

- from ConfigParser import SafeConfigParser

+ from six.moves.configparser import SafeConfigParser

  

  sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

  
@@ -26,14 +26,14 @@ 

          compose_path = os.path.join(self.topdir, COMPOSE_ID, 'compose')

          isos = unified_isos.UnifiedISO(compose_path)

          self.assertEqual(isos.compose_path, compose_path)

-         self.assertRegexpMatches(isos.temp_dir,

-                                  '^%s/' % os.path.join(self.topdir, COMPOSE_ID, 'work'))

+         self.assertRegex(isos.temp_dir,

+                          '^%s/' % os.path.join(self.topdir, COMPOSE_ID, 'work'))

  

      def test_can_find_compose_subdir(self):

          isos = unified_isos.UnifiedISO(os.path.join(self.topdir, COMPOSE_ID))

          self.assertEqual(isos.compose_path, os.path.join(self.topdir, COMPOSE_ID, 'compose'))

-         self.assertRegexpMatches(isos.temp_dir,

-                                  '^%s/' % os.path.join(self.topdir, COMPOSE_ID, 'work'))

+         self.assertRegex(isos.temp_dir,

+                          '^%s/' % os.path.join(self.topdir, COMPOSE_ID, 'work'))

  

      @mock.patch('os.rename')

      def test_dump_manifest(self, rename):

file modified
+34 -17
@@ -29,7 +29,8 @@ 

          url = util.resolve_git_url('https://git.example.com/repo.git?somedir#HEAD')

  

          self.assertEqual(url, 'https://git.example.com/repo.git?somedir#CAFEBABE')

-         run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'HEAD'])

+         run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'HEAD'],

+                                     universal_newlines=True)

  

      @mock.patch('pungi.util.run')

      def test_successful_resolve_branch(self, run):
@@ -38,7 +39,8 @@ 

          url = util.resolve_git_url('https://git.example.com/repo.git?somedir#origin/f24')

  

          self.assertEqual(url, 'https://git.example.com/repo.git?somedir#CAFEBABE')

-         run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'refs/heads/f24'])

+         run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'refs/heads/f24'],

+                                     universal_newlines=True)

  

      @mock.patch('pungi.util.run')

      def test_resolve_missing_spec(self, run):
@@ -61,7 +63,8 @@ 

          with self.assertRaises(RuntimeError):

              util.resolve_git_url('https://git.example.com/repo.git?somedir#HEAD')

  

-         run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'HEAD'])

+         run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'HEAD'],

+                                     universal_newlines=True)

  

      @mock.patch('pungi.util.run')

      def test_resolve_keep_empty_query_string(self, run):
@@ -69,7 +72,8 @@ 

  

          url = util.resolve_git_url('https://git.example.com/repo.git?#HEAD')

  

-         run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'HEAD'])

+         run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'HEAD'],

+                                     universal_newlines=True)

          self.assertEqual(url, 'https://git.example.com/repo.git?#CAFEBABE')

  

      @mock.patch('pungi.util.run')
@@ -78,7 +82,8 @@ 

  

          url = util.resolve_git_url('git+https://git.example.com/repo.git#HEAD')

  

-         run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'HEAD'])

+         run.assert_called_once_with(['git', 'ls-remote', 'https://git.example.com/repo.git', 'HEAD'],

+                                     universal_newlines=True)

          self.assertEqual(url, 'git+https://git.example.com/repo.git#CAFEBABE')

  

      @mock.patch('pungi.util.run')
@@ -89,7 +94,8 @@ 

              util.resolve_git_url('https://git.example.com/repo.git?somedir#origin/my-branch')

  

          run.assert_called_once_with(

-             ['git', 'ls-remote', 'https://git.example.com/repo.git', 'refs/heads/my-branch'])

+             ['git', 'ls-remote', 'https://git.example.com/repo.git', 'refs/heads/my-branch'],

+             universal_newlines=True)

          self.assertIn('ref does not exist in remote repo', str(ctx.exception))

  

      @mock.patch('time.sleep')
@@ -102,7 +108,8 @@ 

          self.assertEqual(url, 'https://git.example.com/repo.git?somedir#CAFEBABE')

          self.assertEqual(sleep.call_args_list, [mock.call(30)])

          self.assertEqual(run.call_args_list,

-                          [mock.call(['git', 'ls-remote', 'https://git.example.com/repo.git', 'HEAD'])] * 2)

+                          [mock.call(['git', 'ls-remote', 'https://git.example.com/repo.git', 'HEAD'],

+                                     universal_newlines=True)] * 2)

  

  

  class TestGetVariantData(unittest.TestCase):
@@ -373,7 +380,8 @@ 

          mockPopen.side_effect = [self._fakeProc(0, '')]

          util.run_unmount_cmd(cmd)

          self.assertEqual(mockPopen.call_args_list,

-                          [mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE)])

+                          [mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE,

+                                     universal_newlines=True)])

  

      @mock.patch('subprocess.Popen')

      def test_unmount_cmd_fail_other_reason(self, mockPopen):
@@ -384,7 +392,8 @@ 

          self.assertEqual(str(ctx.exception),

                           "Unhandled error when running 'unmount': 'It is broken'")

          self.assertEqual(mockPopen.call_args_list,

-                          [mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE)])

+                          [mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE,

+                                     universal_newlines=True)])

  

      @mock.patch('time.sleep')

      @mock.patch('subprocess.Popen')
@@ -395,7 +404,8 @@ 

                                   self._fakeProc(0, '')]

          util.run_unmount_cmd(cmd)

          self.assertEqual(mockPopen.call_args_list,

-                          [mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE)] * 3)

+                          [mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE,

+                                     universal_newlines=True)] * 3)

          self.assertEqual(mock_sleep.call_args_list,

                           [mock.call(0), mock.call(1)])

  
@@ -409,7 +419,8 @@ 

          with self.assertRaises(RuntimeError) as ctx:

              util.run_unmount_cmd(cmd, max_retries=3)

          self.assertEqual(mockPopen.call_args_list,

-                          [mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE)] * 3)

+                          [mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE,

+                                     universal_newlines=True)] * 3)

          self.assertEqual(mock_sleep.call_args_list,

                           [mock.call(0), mock.call(1), mock.call(2)])

          self.assertEqual(str(ctx.exception), "Failed to run 'unmount': Device or resource busy.")
@@ -427,15 +438,21 @@ 

          with self.assertRaises(RuntimeError) as ctx:

              util.fusermount('/path', max_retries=3, logger=logger)

          cmd = ['fusermount', '-u', '/path']

-         expected = [mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE),

-                     mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE),

-                     mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE),

+         expected = [mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE,

+                               universal_newlines=True),

+                     mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE,

+                               universal_newlines=True),

+                     mock.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE,

+                               universal_newlines=True),

                      mock.call(['ls', '-lA', '/path'],

-                               stderr=subprocess.STDOUT, stdout=subprocess.PIPE),

+                               stderr=subprocess.STDOUT, stdout=subprocess.PIPE,

+                               universal_newlines=True),

                      mock.call(['fuser', '-vm', '/path'],

-                               stderr=subprocess.STDOUT, stdout=subprocess.PIPE),

+                               stderr=subprocess.STDOUT, stdout=subprocess.PIPE,

+                               universal_newlines=True),

                      mock.call(['lsof', '+D', '/path'],

-                               stderr=subprocess.STDOUT, stdout=subprocess.PIPE)]

+                               stderr=subprocess.STDOUT, stdout=subprocess.PIPE,

+                               universal_newlines=True)]

          self.assertEqual(mockPopen.call_args_list, expected)

          self.assertEqual(mock_sleep.call_args_list,

                           [mock.call(0), mock.call(1), mock.call(2)])

This should make all tests pass on both Python 2 and Python 3.

Unittest2 is required on Py 2.6 and Py 3.

This also requires a py3 compatible version of kobo that is not released yet.

Not even a dummy compose can work with the proposed patch for Kobo. There need to be more changes, either here or in Kobo.

1 new commit added

  • Use universal_newlines when running other commands
6 years ago

2 new commits added

  • Use universal_newlines when running other commands
  • Port to Python 3
6 years ago

2 new commits added

  • Use universal_newlines when running other commands
  • Port to Python 3
6 years ago

rebased onto 80cb396c6cbb699af066c2f89ce3c537eaf86b82

6 years ago

1 new commit added

  • Explicitly decode test files as UTF-8
6 years ago

1 new commit added

  • Convert configparser values to string
6 years ago

rebased onto 9b633dfb5f1625cd3ae6a968fcc1955a5341e161

6 years ago

2 new commits added

  • Reject yum gather backend on Python 3
  • Stop using deprecated pipes.quote
6 years ago

rebased onto e193c7afbad12e60638a752e6af7f271b812a921

6 years ago

rebased onto ed22e07

6 years ago

Pull-Request has been merged by lsedlar

6 years ago
Changes Summary 63
+3 -4
file changed
bin/pungi-koji
+6 -4
file changed
doc/configuration.rst
+1 -1
file changed
pungi/__init__.py
+16 -7
file changed
pungi/checks.py
+2 -2
file changed
pungi/compose.py
+3 -3
file changed
pungi/createiso.py
+4 -4
file changed
pungi/gather_dnf.py
+1 -1
file changed
pungi/metadata.py
+1 -1
file changed
pungi/multilib_dnf.py
+6 -6
file changed
pungi/phases/buildinstall.py
+2 -2
file changed
pungi/phases/createiso.py
+4 -4
file changed
pungi/phases/createrepo.py
+8 -8
file changed
pungi/phases/gather/__init__.py
+2 -2
file changed
pungi/phases/gather/sources/source_json.py
+1 -1
file changed
pungi/phases/image_build.py
+2 -2
file changed
pungi/phases/image_checksum.py
+2 -2
file changed
pungi/phases/live_images.py
+1 -1
file changed
pungi/phases/livemedia_phase.py
+4 -4
file changed
pungi/phases/ostree_installer.py
+2 -2
file changed
pungi/phases/pkgset/__init__.py
+1 -1
file changed
pungi/phases/pkgset/sources/source_koji.py
+1 -1
file changed
pungi/phases/pkgset/sources/source_repos.py
+17 -17
file changed
pungi/phases/product_img.py
+5 -5
file changed
pungi/phases/test.py
+19 -18
file changed
pungi/util.py
+1 -1
file changed
pungi/wrappers/comps.py
+8 -6
file changed
pungi/wrappers/iso.py
+16 -10
file changed
pungi/wrappers/kojiwrapper.py
+2 -2
file changed
pungi/wrappers/pungi.py
+2 -2
file changed
pungi/wrappers/repoclosure.py
+9 -8
file changed
pungi/wrappers/scm.py
+17 -24
file changed
pungi/wrappers/variants.py
+5 -7
file changed
pungi_utils/patch_iso.py
+3 -3
file changed
pungi_utils/unified_isos.py
+6 -3
file changed
tests/helpers.py
+8 -4
file changed
tests/test_arguments.py
+1 -0
file changed
tests/test_buildinstall.py
+30 -35
file changed
tests/test_checks.py
+6 -6
file changed
tests/test_compose.py
+4 -4
file changed
tests/test_comps_wrapper.py
+38 -5
file changed
tests/test_config.py
+2 -7
file changed
tests/test_config_validate_script.py
+4 -1
file changed
tests/test_createiso_phase.py
+2 -2
file changed
tests/test_createiso_script.py
+6 -5
file changed
tests/test_createrepophase.py
+1 -1
file changed
tests/test_extra_files_phase.py
+7 -3
file changed
tests/test_gather.py
+3 -0
file changed
tests/test_gather_phase.py
+2 -2
file changed
tests/test_initphase.py
+4 -2
file changed
tests/test_iso_wrapper.py
+38 -23
file changed
tests/test_koji_wrapper.py
+2 -2
file changed
tests/test_livemediaphase.py
+0 -5
file changed
tests/test_lorax_wrapper.py
+4 -1
file changed
tests/test_notifier.py
+1 -1
file changed
tests/test_osbs_phase.py
+0 -4
file changed
tests/test_ostree_script.py
+1 -1
file changed
tests/test_patch_iso.py
+6 -6
file changed
tests/test_pkgset_pkgsets.py
+7 -7
file changed
tests/test_pkgset_source_koji.py
+2 -2
file changed
tests/test_scm.py
+7 -7
file changed
tests/test_test_phase.py
+5 -5
file changed
tests/test_unified_isos.py
+34 -17
file changed
tests/test_util.py