#8313 Make releng scripts more pep8 compliant
Opened a year ago by humaton. Modified a year ago
humaton/releng pep8  into  master

file modified
+3 -3

@@ -1,12 +1,12 @@ 

  PYTHON=python3

- PEP8=$(PYTHON)-pep8

+ PEP8=pycodestyle-3

  COVERAGE=coverage

  ifeq ($(PYTHON),python3)

    COVERAGE=coverage3

  endif

  

- TEST_DEPENDENCIES = python3-pep8 python3-pocketlint

- TEST_DEPENDENCIES += python3-koji fedora-cert packagedb-cli

+ TEST_DEPENDENCIES = python3-pycodestyle python3-pocketlint

+ TEST_DEPENDENCIES += python3-koji packagedb-cli

  TEST_DEPENDENCIES += python3-fedmsg-core python3-configparser

  TEST_DEPENDENCIES := $(shell echo $(sort $(TEST_DEPENDENCIES)) | uniq)

  

file modified
+3 -5

@@ -12,7 +12,6 @@ 

  import pkgdb2client

  import requests

  

- 

  log = logging.getLogger(__name__)

  RETIRING_BRANCHES = ["el6", "epel7", "f30", "master"]

  PROD_ONLY_BRANCHES = ["el6", "epel7", "f30", "master"]

@@ -28,7 +27,6 @@ 

  

  

  class SubjectSMTPHandler(logging.handlers.SMTPHandler):

- 

      subject_prefix = ""

  

      def getSubject(self, record):

@@ -216,9 +214,9 @@ 

      cmd = ["block-pkg", tag] + packages

      catch_koji_errors(cmd)

  

-     #If a package moves from EPEL to RHEL it can only be built if it is unblocked

-     #in the epel build tag. Therefore unblock all retired EPEL packages in the

-     #built tag since it does not hurt if the package does not move to RHEL.

+     # If a package moves from EPEL to RHEL it can only be built if it is unblocked

+     # in the epel build tag. Therefore unblock all retired EPEL packages in the

+     # built tag since it does not hurt if the package does not move to RHEL.

      if epel_build_tag:

          cmd = ["unblock-pkg", epel_build_tag] + packages

          catch_koji_errors(cmd)

@@ -23,11 +23,9 @@ 

  from pdc_client import PDCClient

  from yaml import dump

  

- 

  requests_cache.install_cache('modulepkg_cache')

  log = logging.getLogger(__name__)

  

- 

  MODULES_SRC_URL = "https://src.fedoraproject.org/modules/"

  PDC_DEVELOP = True

  PDC_URL_PROD = "https://pdc.fedoraproject.org/rest_api/v1/"

@@ -84,8 +82,7 @@ 

                                      for m in mods]):

              found_mods = []

              for m in mods:

-                 if (name == m.get('variant_name') and

-                    version == m.get('variant_version')):

+                 if (name == m.get('variant_name') and version == m.get('variant_version')):

                      found_mods.append(m)

  

              modules.extend(found_mods)

@@ -170,12 +167,12 @@ 

              m_dict['ref'] = mmd_rpm.ref

          if parser_args.filter_api:

              api = package if parser_args.filter_api == 1 \

-                           else parser_args.filter_api

+                 else parser_args.filter_api

              if api not in mmd.api.rpms:

                  filtered = True

          if parser_args.filter_profile:

              profile = 'default' if parser_args.filter_profile == 1 \

-                                 else parser_args.filter_profile

+                 else parser_args.filter_profile

              try:

                  mmd_profile = mmd.profiles[profile]

              except KeyError:

@@ -248,7 +245,7 @@ 

                          action="store_true", help="A more compact output")

      parser.add_argument("-s", "--short-descriptions", default=False,

                          action="store_true", help="Use short (greppable)"

-                         " descriptions")

+                                                   " descriptions")

      parser.add_argument("packages", nargs="*", metavar="package",

                          help="Package(s) to look up for depending modules")

      parser.add_argument(

file modified
+3 -2

@@ -36,7 +36,7 @@ 

      # more digits of precision than str(time.time())

      return '%s/%r.%s' % (prefix, time.time(),

                           ''.join([random.choice(string.ascii_letters)

-                                  for i in range(8)]))

+                                   for i in range(8)]))

  

  

  def _rpmvercmp((e1, v1, r1), (e2, v2, r2)):

@@ -131,6 +131,7 @@ 

      tagSuccessful(build, tag)

      return True

  

+ 

  # setup the koji session

  logging.info('Setting up koji session')

  local_koji_module = koji.get_profile_module("arm")

@@ -196,7 +197,7 @@ 

              logging.debug("Local Complete Build: %s" % nvr)

              continue

          else:

-             parentevr = (str(epoch), version,  release)

+             parentevr = (str(epoch), version, release)

              latestevr = (str(localLatestBuild[0]['epoch']),

                           localLatestBuild[0]['version'],

                           localLatestBuild[0]['release'])

file modified
+43 -37

@@ -14,7 +14,7 @@ 

  import time

  import random

  import string

- import rpm 

+ import rpm

  import shutil

  

  PACKAGEURL = 'http://kojipkgs.fedoraproject.org/'

@@ -25,6 +25,7 @@ 

  logging.basicConfig(format='%(levelname)s: %(message)s',

                      level=loglevel)

  

+ 

  def _unique_path(prefix):

      """Create a unique path fragment by appending a path component

      to prefix.  The path component will consist of a string of letter and numbers

@@ -34,43 +35,47 @@ 

      # For some reason repr(time.time()) includes 4 or 5

      # more digits of precision than str(time.time())

      return '%s/%r.%s' % (prefix, time.time(),

-                       ''.join([random.choice(string.ascii_letters) for i in range(8)]))

+                          ''.join([random.choice(string.ascii_letters) for i in range(8)]))

+ 

  

- def _rpmvercmp ((e1, v1, r1), (e2, v2, r2)):

+ def _rpmvercmp((e1, v1, r1), (e2, v2, r2)):

      """find out which build is newer"""

      rc = rpm.labelCompare((e1, v1, r1), (e2, v2, r2))

      if rc == 1:

-         #first evr wins

+         # first evr wins

          return 1

      elif rc == 0:

-         #same evr

+         # same evr

          return 0

      else:

-         #second evr wins

+         # second evr wins

          return -1

  

+ 

  def isNoarch(rpms):

      if not rpms:

          return False

      noarch = False

      for rpminfo in rpms:

          if rpminfo['arch'] == 'noarch':

-             #note that we've seen a noarch rpm

+             # note that we've seen a noarch rpm

              noarch = True

          elif rpminfo['arch'] != 'src':

              return False

      return noarch

  

+ 

  def tagSuccessful(nvr, tag):

      """tag completed builds into final tags"""

      localkojisession.tagBuildBypass(tag, nvr)

      print("tagged %s to %s" % (nvr, tag))

  

+ 

  def _downloadURL(url, destf):

      """Download a url and save it to a file"""

-     file = grabber.urlopen(url, progress_obj = pg, text = "%s" % (destf))

+     file = grabber.urlopen(url, progress_obj=pg, text="%s" % (destf))

  

-     out = os.open(destf, os.O_WRONLY|os.O_CREAT|os.O_TRUNC, 0666)

+     out = os.open(destf, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0666)

      try:

          while 1:

              buf = file.read(4096)

@@ -81,24 +86,26 @@ 

          os.close(out)

          file.close()

  

+ 

  def _importURL(url, fn):

      """Import an rpm directly from a url"""

      serverdir = _unique_path('build-recent')

-     #TODO - would be possible, using uploadFile directly, to upload without writing locally.

-     #for now, though, just use uploadWrapper

+     # TODO - would be possible, using uploadFile directly, to upload without writing locally.

+     # for now, though, just use uploadWrapper

      koji.ensuredir(workpath)

      dst = "%s/%s" % (workpath, fn)

      print("Downloading %s to %s..." % (url, dst))

      _downloadURL(url, dst)

-     #fsrc = urllib2.urlopen(url)

-     #fdst = file(dst, 'w')

-     #shutil.copyfileobj(fsrc, fdst)

-     #fsrc.close()

-     #fdst.close()

+     # fsrc = urllib2.urlopen(url)

+     # fdst = file(dst, 'w')

+     # shutil.copyfileobj(fsrc, fdst)

+     # fsrc.close()

+     # fdst.close()

      print("Uploading %s..." % dst)

      localkojisession.uploadWrapper(dst, serverdir, blocksize=65536)

      localkojisession.importRPM(serverdir, fn)

  

+ 

  def importBuild(build, rpms, buildinfo, tag=None):

      '''import a build from remote hub'''

      for rpminfo in rpms:

@@ -111,7 +118,7 @@ 

      _importURL(url, fname)

      for rpminfo in rpms:

          if rpminfo['arch'] == 'src':

-             #already imported above

+             # already imported above

              continue

          relpath = pathinfo.rpm(rpminfo)

          url = "%s/%s" % (build_url, relpath)

@@ -122,6 +129,7 @@ 

      tagSuccessful(build, tag)

      return True

  

+ 

  # setup the koji session

  logging.info('Setting up koji session')

  local_koji_module = koji.get_profile_module("arm")

@@ -132,7 +140,7 @@ 

  

  tag = 'dist-f16'

  

- ignorelist="kernel anaconda CodeAnalyst-gui Glide3 Glide3-libGL LabPlot R-bigmemory alex alt-ergo acpid apmd apmud athcool bibtex2html biosdevname bluez-hcidump camstream ccid ccsm cdrdao cduce darcs appliance-tools cmospwd cmucl coccinelle compat-gcc-296 compiz-bcop compiz-fusion-extras compiz-fusion-unsupported compizconfig-backend-gconf compizconfig-backend-kconfig compizconfig-python cabal-install compiz-fusion coq coredumper cpufrequtils cpuid cpuspeed csisat compiz hlint dmidecode dvgrab cpphs dssi-vst librdmacm edac-utils efax efibootmgr eject elilo esc ext3grep fbset fedora-ksplice emerald minicom coolkey firecontrol firmware-addon-dell fpc fprint_demo fprintd freeipmi freetennis ghc ghc-GLUT ghc-HUnit ghc-OpenGL ghc-X11 ghc-X11-xft ghc-editline ghc-fgl ghc-ghc-paths ghc-gtk2hs ghc-haskell-src ghc-html ghc-mmap ghc-mtl ghc-parallel ghc-parsec ghc-regex-base ghc-regex-compat ghc-regex-posix ghc-stm ghc-tar ghc-haskeline ghc-xhtml ghc-xmonad-contrib ghc-zlib k3b gkrellm-wifi grub2 gnome-do-plugins ghc-haskell-src-exts gnome-pilot gnome-pilot-conduits ghc-uniplate gnu-efi gpart gphoto2 gprolog openobex gsynaptics ghc-HTTP gtksourceview-sharp jpilot-backup eclipse-cdt happy haskell-platform hdparm hevea pilot-link i2c-tools i8kutils ibmasm ifd-egate grub inkscape ghc-cgi ioport iprutils ipw2100-firmware ipw2200-firmware irda-utils irqbalance isdn4k-utils joystick jpilot flashrom kpilot ksensors ksplice latrace lazarus libavc1394 libbsr libcompizconfig libcxgb3 libdc1394 libfprint hscolour libibcm libibcommon libibverbs libiec61883 libraw1394 librtas libsmbios libspe2 libunwind libusb1 hplip libx86 lightning lrmi obexd gnome-media maxima mcelog mediawiki memtest86+ nut libbtctl mkbootdisk mldonkey mod_mono mono-basic monotone-viz msr-tools nspluginwrapper seabios obex-data-server ocaml ocaml-SDL ocaml-ancient ocaml-augeas ocaml-bisect ocaml-bitstring ocaml-cairo ocaml-calendar ocaml-camlidl ocaml-camlimages ocaml-camlp5 ocaml-camomile ocaml-cil ocaml-cmigrep 
ocaml-csv ocaml-cryptokit ocaml-curl ocaml-curses ocaml-dbus ocaml-deriving ocaml-expat ocaml-extlib ocaml-facile ocaml-fileutils ocaml-findlib ocaml-gettext ocaml-gsl ocaml-json-static ocaml-json-wheel ocaml-lablgl ocaml-lablgtk ocaml-lacaml ocaml-libvirt ocaml-lwt ocaml-mikmatch ocaml-mlgmpidl ocaml-mysql ocaml-newt ocaml-ocamlgraph ocaml-ocamlnet ocaml-omake ocaml-openin ocaml-ounit ocaml-p3l ocaml-pa-do ocaml-pa-monad ocaml-pcre ocaml-perl4caml ocaml-pgocaml ocaml-postgresql ocaml-preludeml ocaml-pxp ocaml-reins ocaml-res ocaml-sexplib ocaml-sqlite ocaml-ssl ocaml-type-conv ocaml-ulex ocaml-xml-light ocaml-xmlrpc-light ocaml-zip ocamldsort ohm olpc-kbdshim olpc-powerd setserial ghc-dataenc ghc-hashed-storage libdv libibmad libhid pcc xorg-x11-drv-openchrome ghc-binary system-config-kdump libibumad pidgin libcrystalhd picprog planets pmtools podsleuth powerpc-utils powerpc-utils-papr ppc64-utils microcode_ctl procbench ps3-utils pvs-sbcl numactl python-iwlib python-psyco eclipse-changelog pyxf86config openmpi pcmciautils openscada rp-pppoe rpmdepsize s3switch sbcl eclipse-rpm-editor rhythmbox opensm sound-juicer spicctrl spring-installer stapitrace statserial svgalib syslinux sysprof system-config-boot system-config-display tbb ghc-QuickCheck tpb tuxcmd tvtime unetbootin unison213 unison227 valgrind vbetool ghc-network viaideinfo yaboot virt-mem virt-top vrq wacomexpresskeys xenner why wine wraplinux wxMaxima wyrd x86info xen xfce4-sensors-plugin xmonad xorg-x11-drv-acecad xorg-x11-drv-aiptek xorg-x11-drv-apm xorg-x11-drv-ark xorg-x11-drv-ast xorg-x11-drv-chips xorg-x11-drv-cirrus xorg-x11-drv-dummy xorg-x11-drv-elographics xorg-x11-drv-evdev xorg-x11-drv-fbdev xorg-x11-drv-geode xorg-x11-drv-glint xorg-x11-drv-hyperpen xorg-x11-drv-i128 xorg-x11-drv-i740 xorg-x11-drv-intel xorg-x11-drv-ivtv xorg-x11-drv-keyboard xorg-x11-drv-mach64 xorg-x11-drv-mga xorg-x11-drv-mouse xorg-x11-drv-mutouch xorg-x11-drv-neomagic xorg-x11-drv-nv xorg-x11-drv-penmount 
xorg-x11-drv-r128 xorg-x11-drv-radeonhd xorg-x11-drv-rendition xorg-x11-drv-s3 xorg-x11-drv-s3virge xorg-x11-drv-savage xorg-x11-drv-siliconmotion xorg-x11-drv-sis xorg-x11-drv-sisusb xorg-x11-drv-tdfx xorg-x11-drv-trident xorg-x11-drv-tseng xorg-x11-drv-v4l xorg-x11-drv-vesa xorg-x11-drv-vmware xorg-x11-drv-void xorg-x11-drv-voodoo xsp zenon zfs-fuse xorg-x11-drv-fpit libmlx4 libmthca rxtx xorg-x11-drv-vmmouse xorg-x11-drv-synaptics xorg-x11-drv-nouveau xorg-x11-drv-ati superiotool xorg-x11-drivers xorg-x11-drv-qxl qpid-cpp xorg-x11-drv-wacom openoffice.org"

+ ignorelist = "kernel anaconda CodeAnalyst-gui Glide3 Glide3-libGL LabPlot R-bigmemory alex alt-ergo acpid apmd apmud athcool bibtex2html biosdevname bluez-hcidump camstream ccid ccsm cdrdao cduce darcs appliance-tools cmospwd cmucl coccinelle compat-gcc-296 compiz-bcop compiz-fusion-extras compiz-fusion-unsupported compizconfig-backend-gconf compizconfig-backend-kconfig compizconfig-python cabal-install compiz-fusion coq coredumper cpufrequtils cpuid cpuspeed csisat compiz hlint dmidecode dvgrab cpphs dssi-vst librdmacm edac-utils efax efibootmgr eject elilo esc ext3grep fbset fedora-ksplice emerald minicom coolkey firecontrol firmware-addon-dell fpc fprint_demo fprintd freeipmi freetennis ghc ghc-GLUT ghc-HUnit ghc-OpenGL ghc-X11 ghc-X11-xft ghc-editline ghc-fgl ghc-ghc-paths ghc-gtk2hs ghc-haskell-src ghc-html ghc-mmap ghc-mtl ghc-parallel ghc-parsec ghc-regex-base ghc-regex-compat ghc-regex-posix ghc-stm ghc-tar ghc-haskeline ghc-xhtml ghc-xmonad-contrib ghc-zlib k3b gkrellm-wifi grub2 gnome-do-plugins ghc-haskell-src-exts gnome-pilot gnome-pilot-conduits ghc-uniplate gnu-efi gpart gphoto2 gprolog openobex gsynaptics ghc-HTTP gtksourceview-sharp jpilot-backup eclipse-cdt happy haskell-platform hdparm hevea pilot-link i2c-tools i8kutils ibmasm ifd-egate grub inkscape ghc-cgi ioport iprutils ipw2100-firmware ipw2200-firmware irda-utils irqbalance isdn4k-utils joystick jpilot flashrom kpilot ksensors ksplice latrace lazarus libavc1394 libbsr libcompizconfig libcxgb3 libdc1394 libfprint hscolour libibcm libibcommon libibverbs libiec61883 libraw1394 librtas libsmbios libspe2 libunwind libusb1 hplip libx86 lightning lrmi obexd gnome-media maxima mcelog mediawiki memtest86+ nut libbtctl mkbootdisk mldonkey mod_mono mono-basic monotone-viz msr-tools nspluginwrapper seabios obex-data-server ocaml ocaml-SDL ocaml-ancient ocaml-augeas ocaml-bisect ocaml-bitstring ocaml-cairo ocaml-calendar ocaml-camlidl ocaml-camlimages ocaml-camlp5 ocaml-camomile ocaml-cil ocaml-cmigrep 
ocaml-csv ocaml-cryptokit ocaml-curl ocaml-curses ocaml-dbus ocaml-deriving ocaml-expat ocaml-extlib ocaml-facile ocaml-fileutils ocaml-findlib ocaml-gettext ocaml-gsl ocaml-json-static ocaml-json-wheel ocaml-lablgl ocaml-lablgtk ocaml-lacaml ocaml-libvirt ocaml-lwt ocaml-mikmatch ocaml-mlgmpidl ocaml-mysql ocaml-newt ocaml-ocamlgraph ocaml-ocamlnet ocaml-omake ocaml-openin ocaml-ounit ocaml-p3l ocaml-pa-do ocaml-pa-monad ocaml-pcre ocaml-perl4caml ocaml-pgocaml ocaml-postgresql ocaml-preludeml ocaml-pxp ocaml-reins ocaml-res ocaml-sexplib ocaml-sqlite ocaml-ssl ocaml-type-conv ocaml-ulex ocaml-xml-light ocaml-xmlrpc-light ocaml-zip ocamldsort ohm olpc-kbdshim olpc-powerd setserial ghc-dataenc ghc-hashed-storage libdv libibmad libhid pcc xorg-x11-drv-openchrome ghc-binary system-config-kdump libibumad pidgin libcrystalhd picprog planets pmtools podsleuth powerpc-utils powerpc-utils-papr ppc64-utils microcode_ctl procbench ps3-utils pvs-sbcl numactl python-iwlib python-psyco eclipse-changelog pyxf86config openmpi pcmciautils openscada rp-pppoe rpmdepsize s3switch sbcl eclipse-rpm-editor rhythmbox opensm sound-juicer spicctrl spring-installer stapitrace statserial svgalib syslinux sysprof system-config-boot system-config-display tbb ghc-QuickCheck tpb tuxcmd tvtime unetbootin unison213 unison227 valgrind vbetool ghc-network viaideinfo yaboot virt-mem virt-top vrq wacomexpresskeys xenner why wine wraplinux wxMaxima wyrd x86info xen xfce4-sensors-plugin xmonad xorg-x11-drv-acecad xorg-x11-drv-aiptek xorg-x11-drv-apm xorg-x11-drv-ark xorg-x11-drv-ast xorg-x11-drv-chips xorg-x11-drv-cirrus xorg-x11-drv-dummy xorg-x11-drv-elographics xorg-x11-drv-evdev xorg-x11-drv-fbdev xorg-x11-drv-geode xorg-x11-drv-glint xorg-x11-drv-hyperpen xorg-x11-drv-i128 xorg-x11-drv-i740 xorg-x11-drv-intel xorg-x11-drv-ivtv xorg-x11-drv-keyboard xorg-x11-drv-mach64 xorg-x11-drv-mga xorg-x11-drv-mouse xorg-x11-drv-mutouch xorg-x11-drv-neomagic xorg-x11-drv-nv xorg-x11-drv-penmount 
xorg-x11-drv-r128 xorg-x11-drv-radeonhd xorg-x11-drv-rendition xorg-x11-drv-s3 xorg-x11-drv-s3virge xorg-x11-drv-savage xorg-x11-drv-siliconmotion xorg-x11-drv-sis xorg-x11-drv-sisusb xorg-x11-drv-tdfx xorg-x11-drv-trident xorg-x11-drv-tseng xorg-x11-drv-v4l xorg-x11-drv-vesa xorg-x11-drv-vmware xorg-x11-drv-void xorg-x11-drv-voodoo xsp zenon zfs-fuse xorg-x11-drv-fpit libmlx4 libmthca rxtx xorg-x11-drv-vmmouse xorg-x11-drv-synaptics xorg-x11-drv-nouveau xorg-x11-drv-ati superiotool xorg-x11-drivers xorg-x11-drv-qxl qpid-cpp xorg-x11-drv-wacom openoffice.org"

  

  pkgs = remotekojisession.listPackages(tagID=tag, inherited=True)

  

@@ -147,38 +155,38 @@ 

          continue

      pkginfo = remotekojisession.listTagged(tag, inherit=True, package=pkg['package_name'])

      pkgindex = 1

-     if len(pkginfo)>1:

+     if len(pkginfo) > 1:

          logging.info("got build %s" % pkginfo[pkgindex]['nvr'])

-     elif len(pkginfo)==1:

+     elif len(pkginfo) == 1:

          pkgindex = 0

          logging.info("no previous build for %s" % pkg['package_name'])

          logging.info("reverting to current %s" % pkginfo[pkgindex]['nvr'])

      else:

-        # We apparently have 0 builds for this package!

-        logging.info("no builds for %s - skipping" % pkg['package_name'])

-        continue

+         # We apparently have 0 builds for this package!

+         logging.info("no builds for %s - skipping" % pkg['package_name'])

+         continue

      nvr = pkginfo[pkgindex]['nvr']

      name = pkginfo[pkgindex]['package_name']

      epoch = pkginfo[pkgindex]['epoch']

      version = pkginfo[pkgindex]['version']

-     release =  pkginfo[pkgindex]['release']

+     release = pkginfo[pkgindex]['release']

      build_id = pkginfo[pkgindex]['build_id']

      task_id = pkginfo[pkgindex]['task_id']

  

- 

      # check if we have the nvr built or not

      localBuild = localkojisession.getBuild(nvr)

      # if we have never built the nvr on our target hub localBuild is None localLatestBuild wil be empty as well if we have never built it

      # in which case we have nothing to compare and we need to build it

      localLatestBuild = localkojisession.getLatestBuilds(tag, package=str(pkg['package_name']))

-     if not localBuild == None and not localLatestBuild == []:

+     if localBuild is not None and not localLatestBuild == []:

          if localBuild['state'] == 1:

              logging.debug("Local Complete Build: %s" % nvr)

              continue

          else:

-             parentevr = (str(epoch), version,  release)

-             latestevr =  (str(localLatestBuild[0]['epoch']), localLatestBuild[0]['version'], localLatestBuild[0]['release'])

-             newestRPM = _rpmvercmp( parentevr, latestevr)

+             parentevr = (str(epoch), version, release)

+             latestevr = (

+                 str(localLatestBuild[0]['epoch']), localLatestBuild[0]['version'], localLatestBuild[0]['release'])

+             newestRPM = _rpmvercmp(parentevr, latestevr)

              logging.debug("remote evr: %s  \nlocal evr: %s \nResult: %s" % (parentevr, latestevr, newestRPM))

              if newestRPM == -1:

                  logging.info("Newer locally: %s locally is newer than remote" % (latestevr,))

@@ -192,16 +200,15 @@ 

          importBuild(nvr, rpms, buildinfo, tag=tag)

          continue

      request = remotekojisession.getTaskRequest(task_id)

-     #localkojisession.build(request[0], request[1], opts=None, priority=2)

-         

-     fname = "%s.src.rpm" %  nvr

+     # localkojisession.build(request[0], request[1], opts=None, priority=2)

+ 

+     fname = "%s.src.rpm" % nvr

      fpath = "%s/%s.src.rpm" % (workpath, nvr)

      url = "%s/packages/%s/%s/%s/src/%s" % (PACKAGEURL, name, version, release, fname)

  

- 

      if not os.path.isfile(fpath):

-         file = grabber.urlopen(url, progress_obj = pg, text = "%s" % (fname))

-         out = os.open(fpath, os.O_WRONLY|os.O_CREAT|os.O_TRUNC, 0666)

+         file = grabber.urlopen(url, progress_obj=pg, text="%s" % (fname))

+         out = os.open(fpath, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0666)

          try:

              while 1:

                  buf = file.read(4096)

@@ -211,7 +218,7 @@ 

          finally:

              os.close(out)

              file.close()

-         

+ 

      serverdir = _unique_path('cli-build')

      localkojisession.uploadWrapper(fpath, serverdir, blocksize=65536)

      source = "%s/%s" % (serverdir, fname)

@@ -228,4 +235,3 @@ 

  

      localkojisession.build(source, target, opts=None, priority=2)

      logging.info("submitted build: %s" % nvr)

- 

@@ -12,6 +12,7 @@ 

  import os

  import argparse

  import logging

+ 

  log = logging.getLogger(__name__)

  import subprocess

  

@@ -57,6 +58,7 @@ 

          # second evr wins

          return -1

  

+ 

  koji_module = koji.get_profile_module(KOJIHUB)

  kojisession = koji_module.ClientSession(koji_module.config.server)

  kojisession.krb_login()

@@ -39,13 +39,13 @@ 

  cnt = 0

  

  print("reading content of %s tag ..." % (testing_tag))

- testing_builds = sorted(kojisession.listTagged(testing_tag), key = lambda pkg: pkg['package_name'])

+ testing_builds = sorted(kojisession.listTagged(testing_tag), key=lambda pkg: pkg['package_name'])

  for b in testing_builds:

      testing_nvrs.append(b['nvr'])

      testing_dict[b['nvr']] = b

  

  print("reading content of %s tag ..." % (ga_tag))

- ga_builds = sorted(kojisession.listTagged(ga_tag), key = lambda pkg: pkg['package_name'])

+ ga_builds = sorted(kojisession.listTagged(ga_tag), key=lambda pkg: pkg['package_name'])

  for b in ga_builds:

      ga_nvrs.append(b['nvr'])

  

@@ -54,8 +54,8 @@ 

  print("checking NVRs in both %s and %s tags ..." % (ga_tag, testing_tag))

  for b in testing_nvrs:

      if b in ga_nvrs:

- #	print("%s completed %s" % (b, testing_dict[b]['completion_time']))

- 	print("%s" % (b))

- 	cnt += 1

+         # print("%s completed %s" % (b, testing_dict[b]['completion_time']))

+         print("%s" % (b))

+         cnt += 1

  

  print("%s NVRs in both tags" % (cnt))

file modified
+120 -101

@@ -9,124 +9,143 @@ 

          Ariel Lima <alima@redhat.com> -- Red Hat Intern 2018 Summer

  """

  

- import requests#used to make requests to urls

- import argparse#we want to be able to take different inputs to format different urls

- import sys#only used to succesfully terminate script in case of error

- import re#we use this so we can easily manipulate the url

- import json#What we pull from the url will be in json format

+ import requests  # used to make requests to urls

+ import argparse  # we want to be able to take different inputs to format different urls

+ import sys  # only used to succesfully terminate script in case of error

+ import re  # we use this so we can easily manipulate the url

+ import json  # What we pull from the url will be in json format

+ 

+ from bs4 import BeautifulSoup  # we will use beautiful soup to go though an html page in search of dead.package files

  

- from bs4 import BeautifulSoup#we will use beautiful soup to go though an html page in search of dead.package files

  """

- 	Parsing:

+     Parsing:

  

- 	nms: This is the namespace, not necessary default is "rpms"

- 	pck: This is the name of the fedora package, user has to input this, has no default value

- 	brc: This is the specific branch, not necessary default is "master"

+     nms: This is the namespace, not necessary default is "rpms"

+     pck: This is the name of the fedora package, user has to input this, has no default value

+     brc: This is the specific branch, not necessary default is "master"

  """

  parser = argparse.ArgumentParser()

- parser.add_argument("--nms", help="Name of the namespace that contains package", type=str)#namespace package is located in

- parser.add_argument("pck", help="Name of the fedora package",type=str)#package name

- parser.add_argument("--brc", help="Name of the branched version of the package wanted", type=str)#name of the branched version of package wanted

+ parser.add_argument("--nms", help="Name of the namespace that contains package",

+                     type=str)  # namespace package is located in

+ parser.add_argument("pck", help="Name of the fedora package", type=str)  # package name

+ parser.add_argument("--brc", help="Name of the branched version of the package wanted",

+                     type=str)  # name of the branched version of package wanted

  args = parser.parse_args()

  

- #this is the default url used for getting contributors the url is api/0/<namespace>/<package>

- contributors_url = ("https://src.fedoraproject.org/api/0/rpms/"+args.pck)

+ # this is the default url used for getting contributors the url is api/0/<namespace>/<package>

+ contributors_url = ("https://src.fedoraproject.org/api/0/rpms/" + args.pck)

  

- #this is the default url that we will use get the slas

- slas_url = "https://pdc.fedoraproject.org/rest_api/v1/component-branches/?global_component="+args.pck+"&name=master&type=rpm"

+ # this is the default url that we will use get the slas

+ slas_url = "https://pdc.fedoraproject.org/rest_api/v1/component-branches/?global_component=" + args.pck + "&name=master&type=rpm"

  

- #this url will be the default url used to check if a package is a dead package or not

- state_url = "https://src.fedoraproject.org/rpms/"+args.pck+"/tree/master"

+ # this url will be the default url used to check if a package is a dead package or not

+ state_url = "https://src.fedoraproject.org/rpms/" + args.pck + "/tree/master"

  

  """

- 	This is where the argument parsing will happen

+     This is where the argument parsing will happen

  """

  if args.nms:

- 	#case nms argument is used we want to modify the default namespace

- 	contributors_url = re.sub("/rpms/", ("/"+args.nms+"/"), contributors_url)#I added the forward slashes as a means to attempt to minimalize errors

- 	slas_url = re.sub("type=rpm", ("type="+args.nms), slas_url)#Includes 'type' as precaution to possible packagename issues

- 	state_url = re.sub("/rpms/", ("/"+args.nms+"/"), state_url)#When a user specifies a namespace that is not defult we change it

- 	print(contributors_url, slas_url)

+     # case nms argument is used we want to modify the default namespace

+     contributors_url = re.sub("/rpms/", ("/" + args.nms + "/"),

+                               contributors_url)  # I added the forward slashes as a means to attempt to minimalize errors

+     slas_url = re.sub("type=rpm", ("type=" + args.nms),

+                       slas_url)  # Includes 'type' as precaution to possible packagename issues

+     state_url = re.sub("/rpms/", ("/" + args.nms + "/"),

+                        state_url)  # When a user specifies a namespace that is not defult we change it

+     print(contributors_url, slas_url)

  if args.brc:

- 	#case we want to change the branch we get the slas from (default is master)

- 	slas_url = re.sub("name=master", ("name="+args.brc), slas_url)#Includes 'name' as precaution to possible packagename issues

- 	state_url = re.sub("tree/master", ("tree/"+args.brc), state_url)#when a user specifies a branch that is not default we change it in the url

+     # case we want to change the branch we get the slas from (default is master)

+     slas_url = re.sub("name=master", ("name=" + args.brc),

+                       slas_url)  # Includes 'name' as precaution to possible packagename issues

+     state_url = re.sub("tree/master", ("tree/" + args.brc),

+                        state_url)  # when a user specifies a branch that is not default we change it in the url

+ 

  

  def package_contributors(url):

- 	"""

- 		This is a very simple method that will return the contributors of the package specified

- 	"""

- 	try:

- 		#This is really just to make sure that we got to the url we want

- 		#quit if there is any error

- 		response = requests.get(url)#here we have the extra step to ensure that we did not get an error (not converting straight to json)

- 		if(str(response)!="<Response [200]>"):

- 			sys.exit(0)

- 		response = response.json()

- 	except:

- 		print("ERROR: not able to find main page [package contributor method], could be due to wrong input or code update may be needed")

- 

- 	owner = response['access_users']['owner']#Current owner of this package (main_admin)

- 	admins = response['access_users']['admin']#current admins of this package in list format

- 	contributors = owner + admins#owner located at index 0, rest are admins

- 

- 	#we check to see whether it is an orphan package or not

- 	#then this is just basic outputting into a format I think looks good

- 	if(owner[0]=="orphan"):

- 		print("\n*THIS IS AN ORPHAN PACKAGE*")

- 	else:

- 		print("\nOWNER:\n-" + (contributors[0]))

- 

- 	#we check for admins, we could have this implemented into the previous if statement, I didn't because I am not fully aware of the standards for packages

- 	#we check for any admins, then format it in, case there is one

- 	if(len(admins)>=1):

- 		print("\nADMINS: ")

- 		for p in admins:

- 			print("-"+str(p))

- 

- 	return contributors#in case someone needs this for something else in the future we return the list of contributers, index 0 is owner

+     """

+         This is a very simple method that will return the contributors of the package specified

+     """

+     try:

+         # This is really just to make sure that we got to the url we want

+         # quit if there is any error

+         response = requests.get(

+             url)  # here we have the extra step to ensure that we did not get an error (not converting straight to json)

+         if (str(response) != "<Response [200]>"):

+             sys.exit(0)

+         response = response.json()

+     except:

+         print(

+             "ERROR: not able to find main page [package contributor method], could be due to wrong input or code update may be needed")

+ 

+     owner = response['access_users']['owner']  # Current owner of this package (main_admin)

+     admins = response['access_users']['admin']  # current admins of this package in list format

+     contributors = owner + admins  # owner located at index 0, rest are admins

+ 

+     # we check to see whether it is an orphan package or not

+     # then this is just basic outputting into a format I think looks good

+     if (owner[0] == "orphan"):

+         print("\n*THIS IS AN ORPHAN PACKAGE*")

+     else:

+         print("\nOWNER:\n-" + (contributors[0]))

+ 

+     # we check for admins, we could have this implemented into the previous if statement, I didn't because I am not fully aware of the standards for packages

+     # we check for any admins, then format it in, in case there is one

+     if (len(admins) >= 1):

+         print("\nADMINS: ")

+         for p in admins:

+             print("-" + str(p))

+ 

+     return contributors  # in case someone needs this for something else in the future we return the list of contributors, index 0 is owner

+ 

  

  def package_slas(url):

- 	"""

- 		this returns the slas of a package

- 	"""

- 	try:

- 		#This is really just to make sure that we got to the url we want

- 		#quit if there is any error

- 		response = requests.get(url)#***here we have the extra step to ensure that we did not get an error (not converting straight to json)

- 		if(str(response)!="<Response [200]>"):

- 			sys.exit(0)

- 		response = response.json()#***here we finally convert it to json

- 	except:

- 		print("ERROR: not able to find SLA page [package_slas method], could be due to wrong input or code update may be needed")

- 

- 	response=response['results'][0]['slas']#here we specify very clearly what we want from the json object, response now becomes a list of dictionaries

- 	#From here down is just basic outputting into a format I think looks good

- 	print("\nSLAS--")

- 	for item in response[0]:

- 		print(str(item) + ":" + str(response[0][item]))

- 	print("\n")

+     """

+         this returns the slas of a package

+     """

+     try:

+         # This is really just to make sure that we got to the url we want

+         # quit if there is any error

+         response = requests.get(

+             url)  # ***here we have the extra step to ensure that we did not get an error (not converting straight to json)

+         if (str(response) != "<Response [200]>"):

+             sys.exit(0)

+         response = response.json()  # ***here we finally convert it to json

+     except:

+         print(

+             "ERROR: not able to find SLA page [package_slas method], could be due to wrong input or code update may be needed")

+ 

+     response = response['results'][0][

+         'slas']  # here we specify very clearly what we want from the json object, response now becomes a list of dictionaries

+     # From here down is just basic outputting into a format I think looks good

+     print("\nSLAS--")

+     for item in response[0]:

+         print(str(item) + ":" + str(response[0][item]))

+     print("\n")

+ 

  

  def package_state(url):

- 	"""

- 		This will simply check if the string 'dead.package' appears anywhere in the files section of this package

- 	"""

- 	try:

- 		#This is really just to make sure that we got to the url we want

- 		#quit if there is any error

- 		response = requests.get(url)

- 		if(str(response)!="<Response [200]>"):

- 			sys.exit(0)

- 		soup = BeautifulSoup(response.content, 'html.parser')#create a beautiful soup object, pretty much all I know

- 	except:

- 		print("ERROR: not able to find file url[package_state method], could be due to wrong input or code update may be needed")

- 

- 	soup = str(soup)#we will turn soup into a string object to facilitate searching for a sequence

- 

- 	if("dead.package" in soup):#search for dead.package sequence

- 		print("This package has a dead.package file\n")

- 	else:

- 		print("No dead.package file\n")

- package_contributors(contributors_url)#function call

- package_slas(slas_url)#function call

- package_state(state_url)#function call

+     """

+         This will simply check if the string 'dead.package' appears anywhere in the files section of this package

+     """

+     try:

+         # This is really just to make sure that we got to the url we want

+         # quit if there is any error

+         response = requests.get(url)

+         if (str(response) != "<Response [200]>"):

+             sys.exit(0)

+         soup = BeautifulSoup(response.content, 'html.parser')  # create a beautiful soup object, pretty much all I know

+     except:

+         print(

+             "ERROR: not able to find file url[package_state method], could be due to wrong input or code update may be needed")

+ 

+     soup = str(soup)  # we will turn soup into a string object to facilitate searching for a sequence

+ 

+     if ("dead.package" in soup):  # search for dead.package sequence

+         print("This package has a dead.package file\n")

+     else:

+         print("No dead.package file\n")

+ 

+ 

+ package_contributors(contributors_url)  # function call

+ package_slas(slas_url)  # function call

+ package_state(state_url)  # function call

file modified
+24 -17

@@ -33,6 +33,7 @@ 

  domain = '@fedoraproject.org'

  smtpserver = 'localhost'

  

+ 

  def usage():

      print("""

      check-upgrade-paths.py tag1 [/]tag2 [[/]tag3 [/]tag4]

@@ -43,21 +44,24 @@ 

      dist-f8-updates dist-f8-updates-testing /dist-f9-updates dist-f9-updates-testing

      """)

  

+ 

  def compare(pkgA, pkgB):

      pkgdictA = koji.parse_NVR(pkgA)

      pkgdictB = koji.parse_NVR(pkgB)

  

      rc = rpm.labelCompare((pkgdictA['epoch'], pkgdictA['version'], pkgdictA['release']),

-                          (pkgdictB['epoch'], pkgdictB['version'], pkgdictB['release']))

+                           (pkgdictB['epoch'], pkgdictB['version'], pkgdictB['release']))

  

      return rc

  

+ 

  def buildToNvr(build):

      if build['epoch']:

          return '%s:%s' % (build['epoch'], build['nvr'])

      else:

          return build['nvr']

  

+ 

  def genPackageMail(builder, package):

      """Send a mail to the package watchers and the builder regarding the break.

         Mail is set out once per broken package."""

@@ -68,7 +72,7 @@ 

  To: %s

  Subject: Broken upgrade path(s) detected for: %s

  

- """ % (fromaddr, ','.join([addy+domain for addy in addresses]), package)

+ """ % (fromaddr, ','.join([addy + domain for addy in addresses]), package)

  

      for path in badpaths[pkg]:

          msg += "    %s\n" % path

@@ -81,10 +85,11 @@ 

      try:

          server = smtplib.SMTP(smtpserver)

          server.set_debuglevel(1)

-         server.sendmail(fromaddr, [addy+domain for addy in addresses], msg)

+         server.sendmail(fromaddr, [addy + domain for addy in addresses], msg)

      except:

          print('sending mail failed')

  

+ 

  if len(sys.argv) > 1 and sys.argv[1] in ['-h', '--help', '-help', '--usage']:

      usage()

      sys.exit(0)

@@ -104,13 +109,13 @@ 

  # Remove prepended slashes and make a dict of them

  tags = []

  for tag in cmdtags:

-   if tag[0] == '/':

-     realtag = tag[1:]

-     tags.append(realtag)

-     slashdict[realtag] = True

-   else:

-     tags.append(tag)

-     slashdict[tag] = False

+     if tag[0] == '/':

+         realtag = tag[1:]

+         tags.append(realtag)

+         slashdict[realtag] = True

+     else:

+         tags.append(tag)

+         slashdict[tag] = False

  

  # Use multicall to get the latest tagged builds from each tag

  kojisession.multicall = True

@@ -133,17 +138,17 @@ 

  # Loop through the packages, compare e:n-v-rs from the first tag upwards

  # then proceed to the next given tag and again compare upwards

  for pkg in pkgdict:

-     for tag in tags[:-1]: # Skip the last tag since there is nothing to compare it to

+     for tag in tags[:-1]:  # Skip the last tag since there is nothing to compare it to

          idx = tags.index(tag)

-         for nexttag in tags[idx+1:]: # Compare from current tag up

+         for nexttag in tags[idx + 1:]:  # Compare from current tag up

              if pkgdict[pkg].has_key(tag):

-                 if pkgdict[pkg].has_key(nexttag): # only compare if the next tag knows about this package

+                 if pkgdict[pkg].has_key(nexttag):  # only compare if the next tag knows about this package

                      rc = compare(pkgdict[pkg][tag]['nvr'], pkgdict[pkg][nexttag]['nvr'])

                      if rc <= 0:

                          continue

-                     if rc > 0 and tags.index(nexttag) == idx+1 and slashdict[nexttag] and idx+2 < len(tags):

+                     if rc > 0 and tags.index(nexttag) == idx + 1 and slashdict[nexttag] and idx + 2 < len(tags):

                          # Broken? Need to check the next tag!

-                         nextnexttag = tags[idx+2]

+                         nextnexttag = tags[idx + 2]

                          if pkgdict[pkg].has_key(nextnexttag):

                              rc = compare(pkgdict[pkg][tag]['nvr'], pkgdict[pkg][nextnexttag]['nvr'])

                      if rc > 0:

@@ -154,8 +159,10 @@ 

                              badpathsbybuilder[pkgdict[pkg][tag]['builder']] = {}

                          if not badpathsbybuilder[pkgdict[pkg][tag]['builder']].has_key(pkg):

                              badpathsbybuilder[pkgdict[pkg][tag]['builder']][pkg] = []

-                         badpaths[pkg].append('%s > %s (%s %s)' % (tag, nexttag, pkgdict[pkg][tag]['nvr'], pkgdict[pkg][nexttag]['nvr']))

-                         badpathsbybuilder[pkgdict[pkg][tag]['builder']][pkg].append('%s > %s (%s %s)' % (tag, nexttag, pkgdict[pkg][tag]['nvr'], pkgdict[pkg][nexttag]['nvr']))

+                         badpaths[pkg].append(

+                             '%s > %s (%s %s)' % (tag, nexttag, pkgdict[pkg][tag]['nvr'], pkgdict[pkg][nexttag]['nvr']))

+                         badpathsbybuilder[pkgdict[pkg][tag]['builder']][pkg].append(

+                             '%s > %s (%s %s)' % (tag, nexttag, pkgdict[pkg][tag]['nvr'], pkgdict[pkg][nexttag]['nvr']))

  

  msg = """From: %s

  To: %s

file modified
+26 -18

@@ -18,12 +18,14 @@ 

  

  # HAAACK

  import imp

- sys.modules['repoclosure'] = imp.load_source("repoclosure","/usr/bin/repoclosure")

+ 

+ sys.modules['repoclosure'] = imp.load_source("repoclosure", "/usr/bin/repoclosure")

  import repoclosure

  

  owners = {}

  deps = {}

  

+ 

  def generateConfig(distdir, treename, arch, testing=False):

      if not os.path.exists(os.path.join(distdir, arch)):

          return None

@@ -93,9 +95,10 @@ 

  

  def libmunge(match):

      if match.groups()[1].isdigit():

-         return "%s%d" % (match.groups()[0],int(match.groups()[1])+1)

+         return "%s%d" % (match.groups()[0], int(match.groups()[1]) + 1)

      else:

-         return "%s%s" % (match.groups()[0],match.groups()[1])

+         return "%s%s" % (match.groups()[0], match.groups()[1])

+ 

  

  def addOwner(list, pkg):

      if list.get(pkg):

@@ -110,15 +113,17 @@ 

          return True

      return False

  

+ 

  def getSrcPkg(pkg):

      if pkg.arch == 'src':

-       return pkg.name

+         return pkg.name

      srpm = pkg.returnSimple('sourcerpm')

      if not srpm:

          return None

-     srcpkg = string.join(srpm.split('-')[:-2],'-')

+     srcpkg = string.join(srpm.split('-')[:-2], '-')

      return srcpkg

  

+ 

  def printableReq(pkg, dep):

      (n, f, v) = dep

      req = '%s' % n

@@ -129,17 +134,18 @@ 

          req = '%s %s' % (req, v)

      return "%s requires %s" % (pkg, req,)

  

+ 

  def assignBlame(resolver, dep, guilty):

      def __addpackages(sack):

          for package in sack.returnPackages():

              p = getSrcPkg(package)

              if addOwner(guilty, p):

                  list.append(p)

-     

+ 

      # Given a dep, find potential responsible parties

  

      list = []

-     

+ 

      # The dep itself

      list.append(dep)

  

@@ -147,16 +153,16 @@ 

      __addpackages(resolver.whatProvides(dep, None, None))

  

      # Libraries: check for variant in soname

-     if re.match("lib.*\.so\.[0-9]+",dep):

-         new = re.sub("(lib.*\.so\.)([0-9]+)",libmunge,dep)

+     if re.match("lib.*\.so\.[0-9]+", dep):

+         new = re.sub("(lib.*\.so\.)([0-9]+)", libmunge, dep)

          __addpackages(resolver.whatProvides(new, None, None))

          libname = dep.split('.')[0]

          __addpackages(resolver.whatProvides(libname, None, None))

  

      return list

  

- def generateSpam(pkgname, treename, sendmail = True):

  

+ def generateSpam(pkgname, treename, sendmail=True):

      package = deps[pkgname]

      guilty = owners[pkgname]

      conspirators = []

@@ -186,10 +192,10 @@ 

              data = data + "On %s:\n" % (arch)

              brokendeps = subpackage[arch]

              for dep in brokendeps:

-                 data = data + "\t%s\n" % printableReq(dep[0],dep[1])

+                 data = data + "\t%s\n" % printableReq(dep[0], dep[1])

  

      data = data + "Please resolve this as soon as possible.\n\n"

-     

+ 

      fromaddr = 'buildsys@fedoraproject.org'

      toaddrs = [guilty]

      if conspirators:

@@ -201,7 +207,7 @@ 

  Subject: Broken dependencies: %s

  

  %s

- """ % (fromaddr, guilty, string.join(conspirators,','), pkgname, data)

+ """ % (fromaddr, guilty, string.join(conspirators, ','), pkgname, data)

      if sendmail:

          try:

              server = smtplib.SMTP('localhost')

@@ -210,6 +216,7 @@ 

          except:

              print('sending mail failed')

  

+ 

  def doit(dir, treename, mail=True, testing=False):

      for arch in os.listdir(dir):

          conffile = generateConfig(dir, treename, arch, testing)

@@ -223,11 +230,11 @@ 

              carch = 'sparc64v'

          else:

              carch = arch

-         my = repoclosure.RepoClosure(config = conffile, arch = [carch])

+         my = repoclosure.RepoClosure(config=conffile, arch=[carch])

          cachedir = getCacheDir()

          my.repos.setCacheDir(cachedir)

          my.readMetadata()

-         baddeps = my.getBrokenDeps(newest = False)

+         baddeps = my.getBrokenDeps(newest=False)

          pkgs = baddeps.keys()

          tmplist = [(x.returnSimple('name'), x) for x in pkgs]

          tmplist.sort()

@@ -256,21 +263,22 @@ 

  

                  blamelist = assignBlame(my, n, owners)

  

-                 broken.append( (pkg, (n, f, v), blamelist) )

+                 broken.append((pkg, (n, f, v), blamelist))

  

              deps[srcpkg][pkgid][arch] = broken

  

          print("\n\n")

          os.unlink(conffile)

-         shutil.rmtree(cachedir, ignore_errors = True)

+         shutil.rmtree(cachedir, ignore_errors=True)

  

      pkglist = deps.keys()

      for pkg in pkglist:

          generateSpam(pkg, treename, mail)

  

+ 

  if __name__ == '__main__':

  

-     parser = argparse.ArgumentParser(usage = '%(prog)s [options] <directory>')

+     parser = argparse.ArgumentParser(usage='%(prog)s [options] <directory>')

      parser.add_argument("--nomail", action="store_true")

      parser.add_argument("--enable-testing", action="store_true")

      parser.add_argument("--treename", default="rawhide")

file modified
+8 -2

@@ -14,30 +14,35 @@ 

  import rpm

  import sys

  

+ 

  def usage():

      print("""

      clean-overrides.py overridetag updatetag

      """)

  

+ 

  def compare(pkgA, pkgB):

      pkgdictA = koji.parse_NVR(pkgA)

      pkgdictB = koji.parse_NVR(pkgB)

  

      rc = rpm.labelCompare((pkgdictA['epoch'], pkgdictA['version'], pkgdictA['release']),

-                          (pkgdictB['epoch'], pkgdictB['version'], pkgdictB['release']))

+                           (pkgdictB['epoch'], pkgdictB['version'], pkgdictB['release']))

  

      return rc

  

+ 

  def buildToNvr(build):

      if build['epoch']:

          return '%s:%s' % (build['epoch'], build['nvr'])

      else:

          return build['nvr']

  

+ 

  def printBuild(build):

      pkgdict = koji.parse_NVR(build)

      return '%s-%s-%s' % (pkgdict['name'], pkgdict['version'], pkgdict['release'])

  

+ 

  if len(sys.argv) > 1 and sys.argv[1] in ['-h', '--help', '-help', '--usage']:

      usage()

      sys.exit(0)

@@ -80,4 +85,5 @@ 

      print("")

  

  if equal or older:

-     print("Suggest: koji untag-pkg %s %s %s" % (overtag, ' '.join([printBuild(e) for e in equal]), ' '.join([printBuild(o) for o in older])))

+     print("Suggest: koji untag-pkg %s %s %s" % (

+         overtag, ' '.join([printBuild(e) for e in equal]), ' '.join([printBuild(o) for o in older])))

file modified
+43 -33

@@ -14,23 +14,26 @@ 

  from tempfile import mkdtemp

  import dnf

  

+ 

  class SackError(Exception):

      pass

  

+ 

  major_version = sys.version_info[0]

  

  # Set some constants

  # Old definition

- #critpath_groups = ['@core','@critical-path-base','@critical-path-gnome']

+ # critpath_groups = ['@core','@critical-path-base','@critical-path-gnome']

  critpath_groups = [

      '@core', '@critical-path-apps', '@critical-path-base',

      '@critical-path-gnome', '@critical-path-kde', '@critical-path-lxde',

      '@critical-path-xfce'

  ]

- primary_arches=('armhfp', 'x86_64')

- alternate_arches=('i386','aarch64','ppc64','ppc64le','s390x')

+ primary_arches = ('armhfp', 'x86_64')

+ alternate_arches = ('i386', 'aarch64', 'ppc64', 'ppc64le', 's390x')

  # There is not current a programmatic way to generate this list

- fakearch = {'i386':'i686', 'x86_64':'x86_64', 'ppc64':'ppc64', 'ppc':'ppc64', 'armhfp':'armv7hl', 'aarch64':'aarch64', 'ppc64le':'ppc64', 's390x':'s390x'}

+ fakearch = {'i386': 'i686', 'x86_64': 'x86_64', 'ppc64': 'ppc64', 'ppc': 'ppc64', 'armhfp': 'armv7hl',

+             'aarch64': 'aarch64', 'ppc64le': 'ppc64', 's390x': 's390x'}

  fedora_baseurl = 'http://dl.fedoraproject.org/pub/fedora/linux/'

  fedora_alternateurl = 'http://dl.fedoraproject.org/pub/fedora-secondary/'

  releasepath = {

@@ -42,7 +45,7 @@ 

      'rawhide': ''

  }

  

- for x in range(12,27,1):

+ for x in range(12, 27, 1):

      r = str(x)

      releasepath[r] = 'releases/%s/Everything/$basearch/os/' % r

      updatepath[r] = 'updates/%s/$basearch/' % r

@@ -53,12 +56,16 @@ 

  updatepath['branched'] = ''

  

  # blacklists

- blacklist = [ 'tzdata' ]

+ blacklist = ['tzdata']

+ 

  

  def get_source(pkg):

-     return pkg.rsplit('-',2)[0]

+     return pkg.rsplit('-', 2)[0]

+ 

  

  provides_cache = {}

+ 

+ 

  def resolve_deps(pkg, base):

      deps = []

      for prov in pkg.provides:

@@ -71,7 +78,7 @@ 

              po = base.returnPackageByDep(req)

          except yum.Errors.YumBaseError:

              print("ERROR: unresolved dep for %s of pkg %s" % (req[0],

-                   pkg.name))

+                                                               pkg.name))

              raise

          provides_cache[req] = po.name

          deps.append(po.name)

@@ -80,6 +87,7 @@ 

  

      return deps

  

+ 

  def expand_yum_critpath(my, start_list):

      name_list = []

      # Expand the start_list to a list of names

@@ -139,18 +147,20 @@ 

      my.conf.installroot = cachedir

      my.repos.disableRepo('*')

      if "/mnt/koji/compose/" not in args.url:

-         my.add_enable_repo('critpath-repo-%s' % arch, baseurls=[url+releasepath[release]])

-         print("adding critpath-repo-%s at %s" % (arch, url+releasepath[release]))

+         my.add_enable_repo('critpath-repo-%s' % arch, baseurls=[url + releasepath[release]])

+         print("adding critpath-repo-%s at %s" % (arch, url + releasepath[release]))

          if updatepath[release]:

-             my.add_enable_repo('critpath-repo-updates-%s' % arch, baseurls=[url+updatepath[release]])

+             my.add_enable_repo('critpath-repo-updates-%s' % arch, baseurls=[url + updatepath[release]])

      else:

-         my.add_enable_repo('critpath-repo-%s' % arch, baseurls=[url+'/$basearch/os/'])

-         print("adding critpath-repo-%s at %s" % (arch, url+'/$basearch/os/'))

+         my.add_enable_repo('critpath-repo-%s' % arch, baseurls=[url + '/$basearch/os/'])

+         print("adding critpath-repo-%s at %s" % (arch, url + '/$basearch/os/'))

      return (my, cachedir)

  

+ 

  def nvr(p):

      return '-'.join([p.name, p.ver, p.rel])

  

+ 

  def expand_dnf_critpath(release):

      print("Resolving %s dependencies with DNF" % arch)

      base = dnf.Base()

@@ -189,7 +199,7 @@ 

  

              # load up the comps data from configured repositories

              base.read_comps()

-             group = group.replace('@','')

+             group = group.replace('@', '')

              base.group_install(group, ['mandatory', 'default', 'optional'], strict=False)

              # resolve the groups marked in base object

              base.resolve()

@@ -197,7 +207,6 @@ 

  

          return packages

  

- 

      except Exception as ex:

          template = "An exception of type {0} occurred. Arguments:\n{1!r}"

          message = template.format(type(ex).__name__, ex.args)

@@ -210,6 +219,7 @@ 

          shutil.rmtree(temp_cache_dir)

          shutil.rmtree(temp_install_root)

  

+ 

  def solves_with_dnf(release_version):

      if release_version == 'branched':

          return True

@@ -226,25 +236,25 @@ 

  if __name__ == '__main__':

      # Option parsing

      releases = sorted(releasepath.keys())

-     parser = argparse.ArgumentParser(usage = "%%(prog)s [options] [%s]" % '|'.join(releases))

+     parser = argparse.ArgumentParser(usage="%%(prog)s [options] [%s]" % '|'.join(releases))

      parser.add_argument("--nvr", action='store_true', default=False,

-                       help="output full NVR instead of just package name")

+                         help="output full NVR instead of just package name")

      parser.add_argument("-a", "--arches", default=','.join(primary_arches),

-                       help="Primary arches to evaluate (%(default)s)")

+                         help="Primary arches to evaluate (%(default)s)")

      parser.add_argument("-s", "--altarches", default=','.join(alternate_arches),

-                       help="Alternate arches to evaluate (%(default)s)")

+                         help="Alternate arches to evaluate (%(default)s)")

      parser.add_argument("-o", "--output", default="critpath.txt",

-                       help="name of file to write critpath list (%(default)s)")

+                         help="name of file to write critpath list (%(default)s)")

      parser.add_argument("-u", "--url", default=fedora_baseurl,

-                       help="URL to Primary repos")

+                         help="URL to Primary repos")

      parser.add_argument("-r", "--alturl", default=fedora_alternateurl,

-                       help="URL to Alternate repos")

+                         help="URL to Alternate repos")

      parser.add_argument("--srpm", action='store_true', default=False,

-                       help="Output source RPMS instead of binary RPMS (for pkgdb)")

+                         help="Output source RPMS instead of binary RPMS (for pkgdb)")

      parser.add_argument("--noaltarch", action='store_true', default=False,

-                       help="Not to run for alternate architectures")

+                         help="Not to run for alternate architectures")

      parser.add_argument("--dnf", action='store_true', default=False,

-                       help="Use DNF for dependency solving")

+                         help="Use DNF for dependency solving")

      args, extras = parser.parse_known_args()

  

      # Input & Sanity Validation

@@ -258,12 +268,13 @@ 

      package_count = 0

  

      using_dnf = False

-     if (args.dnf == True) or (major_version >= 3) or solves_with_dnf(release):

+     if (args.dnf is True) or (major_version >= 3) or solves_with_dnf(release):

          using_dnf = True

  

      if not using_dnf:

          import yum

          from rpmUtils.arch import getBaseArch

+ 

          if yum.__version_info__ < (3, 2, 24) and args.arches != getBaseArch():

              print("WARNING: yum < 3.2.24 may be unable to depsolve other arches.")

              print("Get a newer yum or run this on an actual %s system." % args.arches)

@@ -274,22 +285,21 @@ 

              print("This script requires the DNF version 2.0 API.")

              sys.exit(1)

  

- 

      if args.nvr and args.srpm:

          print("ERROR: --nvr and --srpm are mutually exclusive")

          sys.exit(1)

  

      if args.url != fedora_baseurl and "/mnt/koji/compose/" not in args.url:

-         releasepath[release] = releasepath[release].replace('development/','')

+         releasepath[release] = releasepath[release].replace('development/', '')

          print("Using Base URL %s" % (args.url + releasepath[release]))

      else:

          print("Using Base URL %s" % (args.url))

  

      # Do the critpath expansion for each arch

      critpath = set()

-     for arch in check_arches+alternate_check_arches:

+     for arch in check_arches + alternate_check_arches:

          if arch in check_arches:

-             url=args.url

+             url = args.url

          elif arch in alternate_check_arches:

              if args.noaltarch:

                  continue

@@ -309,7 +319,7 @@ 

              pkgs = expand_dnf_critpath(release)

          else:

              print("Resolving %s dependencies with YUM" % arch)

-             (my, cachedir) = setup_yum(url = url, release=release, arch=arch)

+             (my, cachedir) = setup_yum(url=url, release=release, arch=arch)

              pkgs = expand_yum_critpath(my, critpath_groups)

  

          if pkgs is None:

@@ -336,11 +346,11 @@ 

                  shutil.rmtree(cachedir)

          print()

      # Write full list

-     f = open(args.output,"wb")

+     f = open(args.output, "wb")

      for packagename in sorted(critpath):

          f.write(packagename + b'\n')

      f.close()

-     if critpath == None:

+     if critpath is None:

          package_count = 0

      else:

          package_count = len(critpath)

file modified
+27 -17

@@ -8,8 +8,11 @@ 

  # Bencode parsing code from http://effbot.org/zone/bencode.htm

  

  from __future__ import print_function

- import sys, re, time, os

+ import os

  import re

+ import sys

+ import time

+ 

  

  def tokenize(text, match=re.compile("([idel])|(\d+):|(-?\d+)").match):

      i = 0

@@ -19,11 +22,12 @@ 

          i = m.end()

          if m.lastindex == 2:

              yield "s"

-             yield text[i:i+int(s)]

+             yield text[i:i + int(s)]

              i = i + int(s)

          else:

              yield s

  

+ 

  def decode_item(next, token):

      if token == "i":

          # integer: "i" value "e"

@@ -46,16 +50,18 @@ 

          raise ValueError

      return data

  

+ 

  def decode(text):

      try:

          src = tokenize(text)

          data = decode_item(src.next, src.next())

-         for token in src: # look for more tokens

+         for token in src:  # look for more tokens

              raise SyntaxError("trailing junk")

      except (AttributeError, ValueError, StopIteration):

          raise SyntaxError("syntax error")

      return data

  

+ 

  def main(argv):

      if len(argv) < 2:

          print("Usage: %s <group> <date>" % (argv[0]))

@@ -65,7 +71,8 @@ 

          date = argv[2]

      else:

          date = time.strftime("%Y-%m-%d")

-     genini(sys.stdout, ".", group,  date)

+     genini(sys.stdout, ".", group, date)

+ 

  

  def SIprefix(n):

      prefix = ["", "k", "M", "G", "T"]

@@ -75,32 +82,35 @@ 

          x = "%.1f" % (n)

          prefix.pop(0)

      return "%s%sB" % (x, prefix[0])

-    

+ 

+ 

  def torrentsize(filename):

      torrentdict = decode(open(filename).read())

      length = sum(y["length"] for y in torrentdict["info"]["files"])

-     return SIprefix(length) 

+     return SIprefix(length)

  

- def genini(output, path, group,  date):

+ 

+ def genini(output, path, group, date):

      for dirpath, dirnames, filenames in os.walk(path):

-     	dirnames.sort()

-     	filenames.sort()

-     	for f in filenames:

+         dirnames.sort()

+         filenames.sort()

+         for f in filenames:

              if not f.endswith(".torrent"):

-             	continue

- 	    filepath = os.path.join(dirpath, f)

+                 continue

+             filepath = os.path.join(dirpath, f)

              displaypath = filepath

              if displaypath.startswith(dirpath):

                  displaypath = displaypath[len(dirpath):]

              if displaypath.startswith("/"):

                  displaypath = displaypath[1:]

- 	    size = torrentsize(filepath)

- 	    output.write("[%s]\n" % (displaypath))

- 	    output.write("description=%s\n" % (f[:-8].replace("-", " ")))

+             size = torrentsize(filepath)

+             output.write("[%s]\n" % (displaypath))

+             output.write("description=%s\n" % (f[:-8].replace("-", " ")))

              output.write("size=%s\n" % (size))

- 	    output.write("releasedate=%s\n" % (date))

+             output.write("releasedate=%s\n" % (date))

              output.write("group=%s\n" % (group))

-     	    output.write("\n")

+             output.write("\n")

+ 

  

  if __name__ == "__main__":

      main(sys.argv)

file modified
+3 -3

@@ -34,10 +34,10 @@ 

                  for pkg in kojisession.listRPMs(componentBuildrootID=rootid['id']):

                      if pkg['name'] == 'binutils':

                          if pkg['version'] == '2.17.50.0.16':

-                             if not build in needbuild:

+                             if build not in needbuild:

                                  needbuild.append(build)

                          elif pkg['version'] == '2.17.50.0.17' and pkg['release'] < '7':

-                             if not build in needbuild:

+                             if build not in needbuild:

                                  needbuild.append(build)

                          else:

                              print("%s had binutils, but it was %s" % (build['nvr'], pkg['nvr']))

@@ -46,7 +46,7 @@ 

  for build in needbuild:

      for rpm in kojisession.listBuildRPMs(build['nvr']):

          if rpm['arch'] == 'ppc':

-             if not build in reallyneedbuild:

+             if build not in reallyneedbuild:

                  reallyneedbuild.append(build)

                  rebuildnames.append(build['name'])

  

file modified
+31 -20

@@ -12,10 +12,12 @@ 

  import xmlrpclib

  from argparse import ArgumentParser

  

+ 

  def _(args):

      """Stub function for translation"""

      return args

  

+ 

  def ensure_connection(session):

      try:

          ret = session.getAPIVersion()

@@ -25,12 +27,14 @@ 

          print(_("WARNING: The server is at API version %d and the client is at %d" % (ret, koji.API_VERSION)))

      return True

  

+ 

  def error(msg=None, code=1):

      if msg:

          sys.stderr.write(msg + "\n")

          sys.stderr.flush()

      sys.exit(code)

  

+ 

  def compare_pkgs(pkg1, pkg2):

      """Helper function to compare two package versions

           return 1 if a > b

@@ -46,6 +50,7 @@ 

      r2 = str(pkg2['release'])

      return rpm.labelCompare((e1, v1, r1), (e2, v2, r2))

  

+ 

  def diff_changelogs(session, pkg1, pkg2):

      cl2 = session.getChangelogEntries(pkg2['build_id'])

      for x in session.getChangelogEntries(pkg1['build_id']):

@@ -54,7 +59,8 @@ 

          except ValueError:

              pass

      return cl2

-     #return session.getChangelogEntries(pkg2['build_id'], after=pkg1['completion_time'])

+     # return session.getChangelogEntries(pkg2['build_id'], after=pkg1['completion_time'])

+ 

  

  def print_hidden_packages(session, tag, opts, pkg_list=None):

      """Find and print the "hidden" packages of the given tag"""

@@ -83,7 +89,7 @@ 

          print("\nComparing %s (%d) to the following tags:" % (tag['name'], tag['id']))

          for ct in comp_tags:

              try:

-                 print("%s%s (%d)" % (" "*ct.get('currdepth',0), ct['name'], ct[ctag_id_key]))

+                 print("%s%s (%d)" % (" " * ct.get('currdepth', 0), ct['name'], ct[ctag_id_key]))

              except KeyError:

                  pass

  

@@ -91,8 +97,8 @@ 

          print("\nBuilding package lists:")

  

      # Build {package_name: pkg} list for all our tags

-     main_latest = {}    #latest by nvr

-     main_top = {}       #latest by tag ordering

+     main_latest = {}  # latest by nvr

+     main_top = {}  # latest by tag ordering

      if opts['verbose']:

          print("%s ..." % tag['name'])

      tagged_pkgs = session.listTagged(tag['id'], latest=True)

@@ -106,8 +112,8 @@ 

              continue

          main_latest[pkg['package_name']] = pkg

  

-     comp_latest = {}    #latest by nvr

-     comp_top = {}       #latest by tag ordering

+     comp_latest = {}  # latest by nvr

+     comp_top = {}  # latest by tag ordering

      for ctag in comp_tags:

          if opts['verbose']:

              print("%s ..." % ctag['name'])

@@ -120,17 +126,18 @@ 

              if pkg_list and not pkg['package_name'] in pkg_list:

                  continue

              comp_top[ctag['name']].setdefault(pkg['package_name'], pkg)

-             if comp_latest[ctag['name']].has_key(pkg['package_name']) and (compare_pkgs(pkg, comp_latest[ctag['name']][pkg['package_name']]) == -1):

+             if comp_latest[ctag['name']].has_key(pkg['package_name']) and (

+                     compare_pkgs(pkg, comp_latest[ctag['name']][pkg['package_name']]) == -1):

                  continue

              comp_latest[ctag['name']][pkg['package_name']] = pkg

  

      # Check for invalid packages

      if pkg_list and opts['verbose']:

          for pkg in pkg_list:

-             if not pkg in main_latest:

+             if pkg not in main_latest:

                  print("%s is not a valid package in tag %s" % (pkg, tag['name']))

              for ctag in comp_latest.keys():

-                 if not pkg in comp_latest[ctag]:

+                 if pkg not in comp_latest[ctag]:

                      print("%s is not a valid package in tag %s" % (pkg, ctag))

  

      if main_latest:

@@ -140,7 +147,7 @@ 

              if opts['verbose']:

                  print("\nComparing packages within %s:" % tag['name'])

              for pkg in keys:

-                 #compare latest by tag order to latest by nvr (within original tag)

+                 # compare latest by tag order to latest by nvr (within original tag)

                  if opts['debug']:

                      print("comparing %s to %s (%s)" % (main_latest[pkg], main_top[pkg], tag['name']))

                  if opts['reverse']:

@@ -161,13 +168,16 @@ 

              for ctag in comp_latest.keys():

                  if comp_latest[ctag].has_key(pkg):

                      if opts['debug']:

-                         print("comparing %s (%s) to %s (%s)" % (comp_latest[ctag][pkg]['nvr'], ctag, main_latest[pkg]['nvr'], tag['name']))

+                         print("comparing %s (%s) to %s (%s)" % (

+                             comp_latest[ctag][pkg]['nvr'], ctag, main_latest[pkg]['nvr'], tag['name']))

                      if opts['reverse']:

                          if (compare_pkgs(main_latest[pkg], comp_latest[ctag][pkg]) == 1):

-                             print("%s (%s) < %s (%s)" % (comp_latest[ctag][pkg]['nvr'], ctag, main_latest[pkg]['nvr'], tag['name']))

+                             print("%s (%s) < %s (%s)" % (

+                                 comp_latest[ctag][pkg]['nvr'], ctag, main_latest[pkg]['nvr'], tag['name']))

                      else:

                          if (compare_pkgs(main_latest[pkg], comp_latest[ctag][pkg]) == -1):

-                             print("%s (%s) > %s (%s)" % (comp_latest[ctag][pkg]['nvr'], ctag, main_latest[pkg]['nvr'], tag['name']))

+                             print("%s (%s) > %s (%s)" % (

+                                 comp_latest[ctag][pkg]['nvr'], ctag, main_latest[pkg]['nvr'], tag['name']))

                              if opts['changelogs']:

                                  for cl in diff_changelogs(session, main_latest[pkg], comp_latest[ctag][pkg]):

                                      print("%(date)s - %(author)s\n%(text)s\n" % cl)

@@ -176,15 +186,16 @@ 

          if opts['verbose']:

              print("Oops, no packages to compare in the main tag (%s)" % tag['name'])

  

+ 

  if __name__ == "__main__":

      usage = _("find-hidden-packages [options] tag <pkg> [<pkg>...]")

-     #usage += _("\n(Specify the --help global option for a list of other help options)")

+     # usage += _("\n(Specify the --help global option for a list of other help options)")

      parser = ArgumentParser(usage=usage)

      parser.add_argument("-v", "--verbose", action="store_true", help=_("Be verbose"))

      parser.add_argument("-d", "--debug", action="store_true", default=False,

-                       help=_("Show debugging output"))

+                         help=_("Show debugging output"))

      parser.add_argument("-s", "--server", default="http://koji.fedoraproject.org/kojihub",

-                       help=_("Url of koji XMLRPC server"))

+                         help=_("URL of koji XMLRPC server"))

      parser.add_argument("-p", "--parent", help=_("Compare against a single parent"))

      parser.add_argument("--reverse", action="store_true", help=_("Process tag's children instead of its parents"))

      parser.add_argument("--changelogs", action="store_true", help=_("Print the differing changelog entries"))

@@ -192,7 +203,7 @@ 

      parser.add_argument("--stop", help=_("Stop processing inheritance at this tag"))

      parser.add_argument("--jump", help=_("Jump from one tag to another when processing inheritance"))

  

-     args, extras  = parser.parse_known_args()

+     args, extras = parser.parse_known_args()

  

      # parse arguments

      opts = {}

@@ -210,7 +221,7 @@ 

  

      # setup server connection

      session_opts = {'debug': opts['debug']}

-     kojihub = koji.ClientSession(args.server,session_opts)

+     kojihub = koji.ClientSession(args.server, session_opts)

  

      # just quick sanity check on the args before we connect to the server

      if len(extras) < 1:

@@ -221,7 +232,7 @@ 

          ensure_connection(kojihub)

          if args.debug:

              print("Successfully connected to hub")

-     except (KeyboardInterrupt,SystemExit):

+     except (KeyboardInterrupt, SystemExit):

          pass

      except:

          if args.debug:

@@ -263,6 +274,6 @@ 

      rv = 0

      try:

          rv = print_hidden_packages(kojihub, tag, opts, pkgs)

-     except (KeyboardInterrupt,SystemExit):

+     except (KeyboardInterrupt, SystemExit):

          pass

      sys.exit(rv)

file modified
+7 -7

@@ -21,14 +21,13 @@ 

  from requests.adapters import HTTPAdapter

  from requests.packages.urllib3.util.retry import Retry

  

- 

  # Set some variables

  # Some of these could arguably be passed in as args.

- buildtag = 'f30-rebuild' # tag to check

- desttag = 'f30' # Tag where fixed builds go

- epoch = '2019-01-31 10:10:00.000000' # Date to check for failures from

- failures = {} # dict of owners to lists of packages that failed.

- failed = [] # raw list of failed packages

+ buildtag = 'f30-rebuild'  # tag to check

+ desttag = 'f30'  # Tag where fixed builds go

+ epoch = '2019-01-31 10:10:00.000000'  # Date to check for failures from

+ failures = {}  # dict of owners to lists of packages that failed.

+ failed = []  # raw list of failed packages

  ownerdataurl = 'https://src.fedoraproject.org/extras/pagure_owner_alias.json'

  

  

@@ -85,7 +84,8 @@ 

      # Check if newer build exists for package

      failbuilds = []

      for build in failtasks:

-         if ((not build['package_id'] in [goodbuild['package_id'] for goodbuild in goodbuilds]) and (not build['package_id'] in [pkg['package_id'] for pkg in pkgs])):

+         if ((build['package_id'] not in [goodbuild['package_id'] for goodbuild in goodbuilds]) and (

+                 build['package_id'] not in [pkg['package_id'] for pkg in pkgs])):

              failbuilds.append(build)

  

      # Generate taskinfo for each failed build

@@ -32,11 +32,11 @@ 

  

  try:

      import texttable

+ 

      with_table = True

  except ImportError:

      with_table = False

  

- 

  cache = dogpile.cache.make_region().configure(

      'dogpile.cache.dbm',

      expiration_time=86400,

@@ -46,12 +46,11 @@ 

  PAGURE_URL = 'https://src.fedoraproject.org'

  PAGURE_MAX_ENTRIES_PER_PAGE = 100

  

- 

  EPEL6_RELEASE = dict(

      repo='https://kojipkgs.fedoraproject.org/mash/updates/dist-6E-epel/'

-     'x86_64/',

+          'x86_64/',

      source_repo='https://kojipkgs.fedoraproject.org/mash/updates/'

-     'dist-6E-epel/SRPMS',

+                 'dist-6E-epel/SRPMS',

      tag='dist-6E-epel',

      branch='el6',

      mailto='epel-announce@lists.fedoraproject.org',

@@ -441,9 +440,7 @@ 

                              srpm_name = self.by_bin[pkg].name

                          else:

                              srpm_name = pkg.name

-                         if (srpm_name not in to_check and

-                                 srpm_name not in new_names and

-                                 srpm_name not in seen):

+                         if (srpm_name not in to_check and srpm_name not in new_names and srpm_name not in seen):

                              new_names.append(srpm_name)

                          new_srpm_names.add(srpm_name)

  

@@ -629,8 +626,7 @@ 

              (pagure_dict[o].age.days // 7) >= week_limit]

  

          if orphans_not_breaking_deps_stale:

-             eprint(f"fedretire --orphan --branch {branch} -- " +

-                    " ".join(orphans_not_breaking_deps_stale))

+             eprint(f"fedretire --orphan --branch {branch} -- " + " ".join(orphans_not_breaking_deps_stale))

  

          info += wrap_and_format(

              f"Orphans{release_text} for at least {week_limit} "

file modified
+1 -2

@@ -19,7 +19,7 @@ 

  oldtag = 'f24'

  # Create a koji session

  parser = argparse.ArgumentParser()

- parser.add_argument('-p','--koji-profile', help='Select a koji profile to use',required=True)

+ parser.add_argument('-p', '--koji-profile', help='Select a koji profile to use', required=True)

  args = parser.parse_args()

  koji_profile = args.koji_profile

  

@@ -65,4 +65,3 @@ 

  print('Tagged %s batches' % batch)

  

  result = kojisession.multiCall()

- 

file modified
+12 -11

@@ -49,12 +49,13 @@ 

      loglevel = logging.DEBUG

  elif args.quiet:

      loglevel = logging.ERROR

- else: 

+ else:

      loglevel = logging.INFO

  

  logging.basicConfig(format='%(levelname)s: %(message)s',

                      level=loglevel)

  

+ 

  def _unique_path(prefix):

      """Create a unique path fragment by appending a path component

      to prefix.  The path component will consist of a string of letter and numbers

@@ -64,7 +65,7 @@ 

      # For some reason repr(time.time()) includes 4 or 5