#8313 Make releng scripts more PEP 8 compliant
Closed 2 years ago by humaton. Opened 4 years ago by humaton.
humaton/releng pep8 into master

file modified
+3 -3
@@ -1,12 +1,12 @@ 

  PYTHON=python3

- PEP8=$(PYTHON)-pep8

+ PEP8=pycodestyle-3

  COVERAGE=coverage

  ifeq ($(PYTHON),python3)

    COVERAGE=coverage3

  endif

  

- TEST_DEPENDENCIES = python3-pep8 python3-pocketlint

- TEST_DEPENDENCIES += python3-koji fedora-cert packagedb-cli

+ TEST_DEPENDENCIES = python3-pycodestyle python3-pocketlint

+ TEST_DEPENDENCIES += python3-koji packagedb-cli

  TEST_DEPENDENCIES += python3-fedmsg-core python3-configparser

  TEST_DEPENDENCIES := $(shell echo $(sort $(TEST_DEPENDENCIES)) | uniq)
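
Editor's note: the Makefile change tracks the upstream rename of the pep8 tool to pycodestyle (same checker, new entry point). For reference, a minimal sketch of driving the checker from Python instead of the Makefile; the file path and options here are illustrative:

    import pycodestyle  # the renamed pep8 package

    style = pycodestyle.StyleGuide(quiet=True)  # suppress per-line output
    report = style.check_files(["scripts/find_unblocked_orphans.py"])
    print("%d PEP 8 violations" % report.total_errors)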

  

file modified
+3 -5
@@ -12,7 +12,6 @@ 

  import pkgdb2client

  import requests

  

- 

  log = logging.getLogger(__name__)

  RETIRING_BRANCHES = ["el6", "epel7", "f30", "master"]

  PROD_ONLY_BRANCHES = ["el6", "epel7", "f30", "master"]
@@ -28,7 +27,6 @@ 

  

  

  class SubjectSMTPHandler(logging.handlers.SMTPHandler):

- 

      subject_prefix = ""

  

      def getSubject(self, record):
@@ -216,9 +214,9 @@ 

      cmd = ["block-pkg", tag] + packages

      catch_koji_errors(cmd)

  

-     #If a package moves from EPEL to RHEL it can only be built if it is unblocked

-     #in the epel build tag. Therefore unblock all retired EPEL packages in the

-     #built tag since it does not hurt if the package does not move to RHEL.

+     # If a package moves from EPEL to RHEL it can only be built if it is unblocked

+     # in the epel build tag. Therefore unblock all retired EPEL packages in the

+     # built tag since it does not hurt if the package does not move to RHEL.

      if epel_build_tag:

          cmd = ["unblock-pkg", epel_build_tag] + packages

          catch_koji_errors(cmd)
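
Editor's note: catch_koji_errors() is not shown in this hunk. Assuming it shells out to the koji CLI and logs failures instead of raising (a hypothetical reconstruction, not the actual releng helper), the calls above amount to roughly:

    import logging
    import subprocess

    log = logging.getLogger(__name__)

    def catch_koji_errors(cmd):
        # hypothetical sketch: run "koji <subcommand> ...", log instead of raising
        try:
            subprocess.check_call(["koji"] + cmd)
        except subprocess.CalledProcessError as err:
            log.error("koji %s failed: %s", cmd[0], err)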

@@ -23,11 +23,9 @@ 

  from pdc_client import PDCClient

  from yaml import dump

  

- 

  requests_cache.install_cache('modulepkg_cache')

  log = logging.getLogger(__name__)

  

- 

  MODULES_SRC_URL = "https://src.fedoraproject.org/modules/"

  PDC_DEVELOP = True

  PDC_URL_PROD = "https://pdc.fedoraproject.org/rest_api/v1/"
@@ -84,8 +82,7 @@ 

                                      for m in mods]):

              found_mods = []

              for m in mods:

-                 if (name == m.get('variant_name') and

-                    version == m.get('variant_version')):

+                 if (name == m.get('variant_name') and version == m.get('variant_version')):

                      found_mods.append(m)

  

              modules.extend(found_mods)
@@ -170,12 +167,12 @@ 

              m_dict['ref'] = mmd_rpm.ref

          if parser_args.filter_api:

              api = package if parser_args.filter_api == 1 \

-                           else parser_args.filter_api

+                 else parser_args.filter_api

              if api not in mmd.api.rpms:

                  filtered = True

          if parser_args.filter_profile:

              profile = 'default' if parser_args.filter_profile == 1 \

-                                 else parser_args.filter_profile

+                 else parser_args.filter_profile

              try:

                  mmd_profile = mmd.profiles[profile]

              except KeyError:
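
Editor's note: the filter_api == 1 / filter_profile == 1 tests above work because the options apparently use 1 as their bare-flag value. A common argparse pattern that produces exactly that behavior (an assumption; the option declarations sit outside this hunk):

    import argparse

    parser = argparse.ArgumentParser()
    # bare --filter-api -> 1 (meaning "use the package name itself");
    # --filter-api foo -> "foo"; omitted -> None
    parser.add_argument("--filter-api", nargs="?", const=1, default=None)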
@@ -248,7 +245,7 @@ 

                          action="store_true", help="A more compact output")

      parser.add_argument("-s", "--short-descriptions", default=False,

                          action="store_true", help="Use short (greppable)"

-                         " descriptions")

+                                                   " descriptions")

      parser.add_argument("packages", nargs="*", metavar="package",

                          help="Package(s) to look up for depending modules")

      parser.add_argument(

file modified
+3 -2
@@ -36,7 +36,7 @@ 

      # more digits of precision than str(time.time())

      return '%s/%r.%s' % (prefix, time.time(),

                           ''.join([random.choice(string.ascii_letters)

-                                  for i in range(8)]))

+                                   for i in range(8)]))

  

  

  def _rpmvercmp((e1, v1, r1), (e2, v2, r2)):
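
Editor's note: def _rpmvercmp((e1, v1, r1), (e2, v2, r2)): relies on Python 2 tuple parameter unpacking, which PEP 3113 removed in Python 3, so whitespace fixes alone will not make this script python3-clean. A Python 3 compatible spelling unpacks in the body, for example:

    def _rpmvercmp(evr1, evr2):
        """find out which build is newer (Python 3 compatible form)"""
        # rpm.labelCompare already returns 1, 0 or -1
        return rpm.labelCompare(evr1, evr2)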
@@ -131,6 +131,7 @@ 

      tagSuccessful(build, tag)

      return True

  

+ 

  # setup the koji session

  logging.info('Setting up koji session')

  local_koji_module = koji.get_profile_module("arm")
@@ -196,7 +197,7 @@ 

              logging.debug("Local Complete Build: %s" % nvr)

              continue

          else:

-             parentevr = (str(epoch), version,  release)

+             parentevr = (str(epoch), version, release)

              latestevr = (str(localLatestBuild[0]['epoch']),

                           localLatestBuild[0]['version'],

                           localLatestBuild[0]['release'])
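
Editor's note: for context, rpm.labelCompare() takes two (epoch, version, release) triples of strings and returns 1, 0 or -1, with epoch dominating, which is why the epochs above are normalized with str():

    import rpm

    rpm.labelCompare(("0", "1.0", "2.fc30"), ("0", "1.0", "1.fc30"))  # -> 1 (first newer)
    rpm.labelCompare(("1", "0.9", "1.fc30"), ("0", "2.0", "1.fc30"))  # -> 1 (epoch wins)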

file modified
+43 -37
@@ -14,7 +14,7 @@ 

  import time

  import random

  import string

- import rpm 

+ import rpm

  import shutil

  

  PACKAGEURL = 'http://kojipkgs.fedoraproject.org/'
@@ -25,6 +25,7 @@ 

  logging.basicConfig(format='%(levelname)s: %(message)s',

                      level=loglevel)

  

+ 

  def _unique_path(prefix):

      """Create a unique path fragment by appending a path component

      to prefix.  The path component will consist of a string of letter and numbers
@@ -34,43 +35,47 @@ 

      # For some reason repr(time.time()) includes 4 or 5

      # more digits of precision than str(time.time())

      return '%s/%r.%s' % (prefix, time.time(),

-                       ''.join([random.choice(string.ascii_letters) for i in range(8)]))

+                          ''.join([random.choice(string.ascii_letters) for i in range(8)]))

+ 

  

- def _rpmvercmp ((e1, v1, r1), (e2, v2, r2)):

+ def _rpmvercmp((e1, v1, r1), (e2, v2, r2)):

      """find out which build is newer"""

      rc = rpm.labelCompare((e1, v1, r1), (e2, v2, r2))

      if rc == 1:

-         #first evr wins

+         # first evr wins

          return 1

      elif rc == 0:

-         #same evr

+         # same evr

          return 0

      else:

-         #second evr wins

+         # second evr wins

          return -1

  

+ 

  def isNoarch(rpms):

      if not rpms:

          return False

      noarch = False

      for rpminfo in rpms:

          if rpminfo['arch'] == 'noarch':

-             #note that we've seen a noarch rpm

+             # note that we've seen a noarch rpm

              noarch = True

          elif rpminfo['arch'] != 'src':

              return False

      return noarch

  

+ 

  def tagSuccessful(nvr, tag):

      """tag completed builds into final tags"""

      localkojisession.tagBuildBypass(tag, nvr)

      print("tagged %s to %s" % (nvr, tag))

  

+ 

  def _downloadURL(url, destf):

      """Download a url and save it to a file"""

-     file = grabber.urlopen(url, progress_obj = pg, text = "%s" % (destf))

+     file = grabber.urlopen(url, progress_obj=pg, text="%s" % (destf))

  

-     out = os.open(destf, os.O_WRONLY|os.O_CREAT|os.O_TRUNC, 0666)

+     out = os.open(destf, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0666)

      try:

          while 1:

              buf = file.read(4096)
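
Editor's note: one Python 3 blocker survives this hunk untouched: 0666 is a Python 2 octal literal and a SyntaxError under python3, where the mode must be spelled 0o666. For example (path is illustrative):

    import os

    # Python 3 spelling of the same open-for-write call
    out = os.open("download.rpm", os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o666)
    os.close(out)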
@@ -81,24 +86,26 @@ 

          os.close(out)

          file.close()

  

+ 

  def _importURL(url, fn):

      """Import an rpm directly from a url"""

      serverdir = _unique_path('build-recent')

-     #TODO - would be possible, using uploadFile directly, to upload without writing locally.

-     #for now, though, just use uploadWrapper

+     # TODO - would be possible, using uploadFile directly, to upload without writing locally.

+     # for now, though, just use uploadWrapper

      koji.ensuredir(workpath)

      dst = "%s/%s" % (workpath, fn)

      print("Downloading %s to %s..." % (url, dst))

      _downloadURL(url, dst)

-     #fsrc = urllib2.urlopen(url)

-     #fdst = file(dst, 'w')

-     #shutil.copyfileobj(fsrc, fdst)

-     #fsrc.close()

-     #fdst.close()

+     # fsrc = urllib2.urlopen(url)

+     # fdst = file(dst, 'w')

+     # shutil.copyfileobj(fsrc, fdst)

+     # fsrc.close()

+     # fdst.close()

      print("Uploading %s..." % dst)

      localkojisession.uploadWrapper(dst, serverdir, blocksize=65536)

      localkojisession.importRPM(serverdir, fn)

  

+ 

  def importBuild(build, rpms, buildinfo, tag=None):

      '''import a build from remote hub'''

      for rpminfo in rpms:
@@ -111,7 +118,7 @@ 

      _importURL(url, fname)

      for rpminfo in rpms:

          if rpminfo['arch'] == 'src':

-             #already imported above

+             # already imported above

              continue

          relpath = pathinfo.rpm(rpminfo)

          url = "%s/%s" % (build_url, relpath)
@@ -122,6 +129,7 @@ 

      tagSuccessful(build, tag)

      return True

  

+ 

  # setup the koji session

  logging.info('Setting up koji session')

  local_koji_module = koji.get_profile_module("arm")
@@ -132,7 +140,7 @@ 

  

  tag = 'dist-f16'

  

- ignorelist="kernel anaconda CodeAnalyst-gui Glide3 Glide3-libGL LabPlot R-bigmemory alex alt-ergo acpid apmd apmud athcool bibtex2html biosdevname bluez-hcidump camstream ccid ccsm cdrdao cduce darcs appliance-tools cmospwd cmucl coccinelle compat-gcc-296 compiz-bcop compiz-fusion-extras compiz-fusion-unsupported compizconfig-backend-gconf compizconfig-backend-kconfig compizconfig-python cabal-install compiz-fusion coq coredumper cpufrequtils cpuid cpuspeed csisat compiz hlint dmidecode dvgrab cpphs dssi-vst librdmacm edac-utils efax efibootmgr eject elilo esc ext3grep fbset fedora-ksplice emerald minicom coolkey firecontrol firmware-addon-dell fpc fprint_demo fprintd freeipmi freetennis ghc ghc-GLUT ghc-HUnit ghc-OpenGL ghc-X11 ghc-X11-xft ghc-editline ghc-fgl ghc-ghc-paths ghc-gtk2hs ghc-haskell-src ghc-html ghc-mmap ghc-mtl ghc-parallel ghc-parsec ghc-regex-base ghc-regex-compat ghc-regex-posix ghc-stm ghc-tar ghc-haskeline ghc-xhtml ghc-xmonad-contrib ghc-zlib k3b gkrellm-wifi grub2 gnome-do-plugins ghc-haskell-src-exts gnome-pilot gnome-pilot-conduits ghc-uniplate gnu-efi gpart gphoto2 gprolog openobex gsynaptics ghc-HTTP gtksourceview-sharp jpilot-backup eclipse-cdt happy haskell-platform hdparm hevea pilot-link i2c-tools i8kutils ibmasm ifd-egate grub inkscape ghc-cgi ioport iprutils ipw2100-firmware ipw2200-firmware irda-utils irqbalance isdn4k-utils joystick jpilot flashrom kpilot ksensors ksplice latrace lazarus libavc1394 libbsr libcompizconfig libcxgb3 libdc1394 libfprint hscolour libibcm libibcommon libibverbs libiec61883 libraw1394 librtas libsmbios libspe2 libunwind libusb1 hplip libx86 lightning lrmi obexd gnome-media maxima mcelog mediawiki memtest86+ nut libbtctl mkbootdisk mldonkey mod_mono mono-basic monotone-viz msr-tools nspluginwrapper seabios obex-data-server ocaml ocaml-SDL ocaml-ancient ocaml-augeas ocaml-bisect ocaml-bitstring ocaml-cairo ocaml-calendar ocaml-camlidl ocaml-camlimages ocaml-camlp5 ocaml-camomile ocaml-cil ocaml-cmigrep ocaml-csv ocaml-cryptokit ocaml-curl ocaml-curses ocaml-dbus ocaml-deriving ocaml-expat ocaml-extlib ocaml-facile ocaml-fileutils ocaml-findlib ocaml-gettext ocaml-gsl ocaml-json-static ocaml-json-wheel ocaml-lablgl ocaml-lablgtk ocaml-lacaml ocaml-libvirt ocaml-lwt ocaml-mikmatch ocaml-mlgmpidl ocaml-mysql ocaml-newt ocaml-ocamlgraph ocaml-ocamlnet ocaml-omake ocaml-openin ocaml-ounit ocaml-p3l ocaml-pa-do ocaml-pa-monad ocaml-pcre ocaml-perl4caml ocaml-pgocaml ocaml-postgresql ocaml-preludeml ocaml-pxp ocaml-reins ocaml-res ocaml-sexplib ocaml-sqlite ocaml-ssl ocaml-type-conv ocaml-ulex ocaml-xml-light ocaml-xmlrpc-light ocaml-zip ocamldsort ohm olpc-kbdshim olpc-powerd setserial ghc-dataenc ghc-hashed-storage libdv libibmad libhid pcc xorg-x11-drv-openchrome ghc-binary system-config-kdump libibumad pidgin libcrystalhd picprog planets pmtools podsleuth powerpc-utils powerpc-utils-papr ppc64-utils microcode_ctl procbench ps3-utils pvs-sbcl numactl python-iwlib python-psyco eclipse-changelog pyxf86config openmpi pcmciautils openscada rp-pppoe rpmdepsize s3switch sbcl eclipse-rpm-editor rhythmbox opensm sound-juicer spicctrl spring-installer stapitrace statserial svgalib syslinux sysprof system-config-boot system-config-display tbb ghc-QuickCheck tpb tuxcmd tvtime unetbootin unison213 unison227 valgrind vbetool ghc-network viaideinfo yaboot virt-mem virt-top vrq wacomexpresskeys xenner why wine wraplinux wxMaxima wyrd x86info xen xfce4-sensors-plugin xmonad xorg-x11-drv-acecad xorg-x11-drv-aiptek xorg-x11-drv-apm xorg-x11-drv-ark 
xorg-x11-drv-ast xorg-x11-drv-chips xorg-x11-drv-cirrus xorg-x11-drv-dummy xorg-x11-drv-elographics xorg-x11-drv-evdev xorg-x11-drv-fbdev xorg-x11-drv-geode xorg-x11-drv-glint xorg-x11-drv-hyperpen xorg-x11-drv-i128 xorg-x11-drv-i740 xorg-x11-drv-intel xorg-x11-drv-ivtv xorg-x11-drv-keyboard xorg-x11-drv-mach64 xorg-x11-drv-mga xorg-x11-drv-mouse xorg-x11-drv-mutouch xorg-x11-drv-neomagic xorg-x11-drv-nv xorg-x11-drv-penmount xorg-x11-drv-r128 xorg-x11-drv-radeonhd xorg-x11-drv-rendition xorg-x11-drv-s3 xorg-x11-drv-s3virge xorg-x11-drv-savage xorg-x11-drv-siliconmotion xorg-x11-drv-sis xorg-x11-drv-sisusb xorg-x11-drv-tdfx xorg-x11-drv-trident xorg-x11-drv-tseng xorg-x11-drv-v4l xorg-x11-drv-vesa xorg-x11-drv-vmware xorg-x11-drv-void xorg-x11-drv-voodoo xsp zenon zfs-fuse xorg-x11-drv-fpit libmlx4 libmthca rxtx xorg-x11-drv-vmmouse xorg-x11-drv-synaptics xorg-x11-drv-nouveau xorg-x11-drv-ati superiotool xorg-x11-drivers xorg-x11-drv-qxl qpid-cpp xorg-x11-drv-wacom openoffice.org"

+ ignorelist = "kernel anaconda CodeAnalyst-gui Glide3 Glide3-libGL LabPlot R-bigmemory alex alt-ergo acpid apmd apmud athcool bibtex2html biosdevname bluez-hcidump camstream ccid ccsm cdrdao cduce darcs appliance-tools cmospwd cmucl coccinelle compat-gcc-296 compiz-bcop compiz-fusion-extras compiz-fusion-unsupported compizconfig-backend-gconf compizconfig-backend-kconfig compizconfig-python cabal-install compiz-fusion coq coredumper cpufrequtils cpuid cpuspeed csisat compiz hlint dmidecode dvgrab cpphs dssi-vst librdmacm edac-utils efax efibootmgr eject elilo esc ext3grep fbset fedora-ksplice emerald minicom coolkey firecontrol firmware-addon-dell fpc fprint_demo fprintd freeipmi freetennis ghc ghc-GLUT ghc-HUnit ghc-OpenGL ghc-X11 ghc-X11-xft ghc-editline ghc-fgl ghc-ghc-paths ghc-gtk2hs ghc-haskell-src ghc-html ghc-mmap ghc-mtl ghc-parallel ghc-parsec ghc-regex-base ghc-regex-compat ghc-regex-posix ghc-stm ghc-tar ghc-haskeline ghc-xhtml ghc-xmonad-contrib ghc-zlib k3b gkrellm-wifi grub2 gnome-do-plugins ghc-haskell-src-exts gnome-pilot gnome-pilot-conduits ghc-uniplate gnu-efi gpart gphoto2 gprolog openobex gsynaptics ghc-HTTP gtksourceview-sharp jpilot-backup eclipse-cdt happy haskell-platform hdparm hevea pilot-link i2c-tools i8kutils ibmasm ifd-egate grub inkscape ghc-cgi ioport iprutils ipw2100-firmware ipw2200-firmware irda-utils irqbalance isdn4k-utils joystick jpilot flashrom kpilot ksensors ksplice latrace lazarus libavc1394 libbsr libcompizconfig libcxgb3 libdc1394 libfprint hscolour libibcm libibcommon libibverbs libiec61883 libraw1394 librtas libsmbios libspe2 libunwind libusb1 hplip libx86 lightning lrmi obexd gnome-media maxima mcelog mediawiki memtest86+ nut libbtctl mkbootdisk mldonkey mod_mono mono-basic monotone-viz msr-tools nspluginwrapper seabios obex-data-server ocaml ocaml-SDL ocaml-ancient ocaml-augeas ocaml-bisect ocaml-bitstring ocaml-cairo ocaml-calendar ocaml-camlidl ocaml-camlimages ocaml-camlp5 ocaml-camomile ocaml-cil ocaml-cmigrep ocaml-csv ocaml-cryptokit ocaml-curl ocaml-curses ocaml-dbus ocaml-deriving ocaml-expat ocaml-extlib ocaml-facile ocaml-fileutils ocaml-findlib ocaml-gettext ocaml-gsl ocaml-json-static ocaml-json-wheel ocaml-lablgl ocaml-lablgtk ocaml-lacaml ocaml-libvirt ocaml-lwt ocaml-mikmatch ocaml-mlgmpidl ocaml-mysql ocaml-newt ocaml-ocamlgraph ocaml-ocamlnet ocaml-omake ocaml-openin ocaml-ounit ocaml-p3l ocaml-pa-do ocaml-pa-monad ocaml-pcre ocaml-perl4caml ocaml-pgocaml ocaml-postgresql ocaml-preludeml ocaml-pxp ocaml-reins ocaml-res ocaml-sexplib ocaml-sqlite ocaml-ssl ocaml-type-conv ocaml-ulex ocaml-xml-light ocaml-xmlrpc-light ocaml-zip ocamldsort ohm olpc-kbdshim olpc-powerd setserial ghc-dataenc ghc-hashed-storage libdv libibmad libhid pcc xorg-x11-drv-openchrome ghc-binary system-config-kdump libibumad pidgin libcrystalhd picprog planets pmtools podsleuth powerpc-utils powerpc-utils-papr ppc64-utils microcode_ctl procbench ps3-utils pvs-sbcl numactl python-iwlib python-psyco eclipse-changelog pyxf86config openmpi pcmciautils openscada rp-pppoe rpmdepsize s3switch sbcl eclipse-rpm-editor rhythmbox opensm sound-juicer spicctrl spring-installer stapitrace statserial svgalib syslinux sysprof system-config-boot system-config-display tbb ghc-QuickCheck tpb tuxcmd tvtime unetbootin unison213 unison227 valgrind vbetool ghc-network viaideinfo yaboot virt-mem virt-top vrq wacomexpresskeys xenner why wine wraplinux wxMaxima wyrd x86info xen xfce4-sensors-plugin xmonad xorg-x11-drv-acecad xorg-x11-drv-aiptek xorg-x11-drv-apm xorg-x11-drv-ark 
xorg-x11-drv-ast xorg-x11-drv-chips xorg-x11-drv-cirrus xorg-x11-drv-dummy xorg-x11-drv-elographics xorg-x11-drv-evdev xorg-x11-drv-fbdev xorg-x11-drv-geode xorg-x11-drv-glint xorg-x11-drv-hyperpen xorg-x11-drv-i128 xorg-x11-drv-i740 xorg-x11-drv-intel xorg-x11-drv-ivtv xorg-x11-drv-keyboard xorg-x11-drv-mach64 xorg-x11-drv-mga xorg-x11-drv-mouse xorg-x11-drv-mutouch xorg-x11-drv-neomagic xorg-x11-drv-nv xorg-x11-drv-penmount xorg-x11-drv-r128 xorg-x11-drv-radeonhd xorg-x11-drv-rendition xorg-x11-drv-s3 xorg-x11-drv-s3virge xorg-x11-drv-savage xorg-x11-drv-siliconmotion xorg-x11-drv-sis xorg-x11-drv-sisusb xorg-x11-drv-tdfx xorg-x11-drv-trident xorg-x11-drv-tseng xorg-x11-drv-v4l xorg-x11-drv-vesa xorg-x11-drv-vmware xorg-x11-drv-void xorg-x11-drv-voodoo xsp zenon zfs-fuse xorg-x11-drv-fpit libmlx4 libmthca rxtx xorg-x11-drv-vmmouse xorg-x11-drv-synaptics xorg-x11-drv-nouveau xorg-x11-drv-ati superiotool xorg-x11-drivers xorg-x11-drv-qxl qpid-cpp xorg-x11-drv-wacom openoffice.org"

  

  pkgs = remotekojisession.listPackages(tagID=tag, inherited=True)

  
@@ -147,38 +155,38 @@ 

          continue

      pkginfo = remotekojisession.listTagged(tag, inherit=True, package=pkg['package_name'])

      pkgindex = 1

-     if len(pkginfo)>1:

+     if len(pkginfo) > 1:

          logging.info("got build %s" % pkginfo[pkgindex]['nvr'])

-     elif len(pkginfo)==1:

+     elif len(pkginfo) == 1:

          pkgindex = 0

          logging.info("no previous build for %s" % pkg['package_name'])

          logging.info("reverting to current %s" % pkginfo[pkgindex]['nvr'])

      else:

-        # We apparently have 0 builds for this package!

-        logging.info("no builds for %s - skipping" % pkg['package_name'])

-        continue

+         # We apparently have 0 builds for this package!

+         logging.info("no builds for %s - skipping" % pkg['package_name'])

+         continue

      nvr = pkginfo[pkgindex]['nvr']

      name = pkginfo[pkgindex]['package_name']

      epoch = pkginfo[pkgindex]['epoch']

      version = pkginfo[pkgindex]['version']

-     release =  pkginfo[pkgindex]['release']

+     release = pkginfo[pkgindex]['release']

      build_id = pkginfo[pkgindex]['build_id']

      task_id = pkginfo[pkgindex]['task_id']

  

- 

      # check if we have the nvr built or not

      localBuild = localkojisession.getBuild(nvr)

      # if we have never built the nvr on our target hub localBuild is None localLatestBuild wil be empty as well if we have never built it

      # in which case we have nothing to compare and we need to build it

      localLatestBuild = localkojisession.getLatestBuilds(tag, package=str(pkg['package_name']))

-     if not localBuild == None and not localLatestBuild == []:

+     if localBuild is not None and not localLatestBuild == []:

          if localBuild['state'] == 1:

              logging.debug("Local Complete Build: %s" % nvr)

              continue

          else:

-             parentevr = (str(epoch), version,  release)

-             latestevr =  (str(localLatestBuild[0]['epoch']), localLatestBuild[0]['version'], localLatestBuild[0]['release'])

-             newestRPM = _rpmvercmp( parentevr, latestevr)

+             parentevr = (str(epoch), version, release)

+             latestevr = (

+                 str(localLatestBuild[0]['epoch']), localLatestBuild[0]['version'], localLatestBuild[0]['release'])

+             newestRPM = _rpmvercmp(parentevr, latestevr)

              logging.debug("remote evr: %s  \nlocal evr: %s \nResult: %s" % (parentevr, latestevr, newestRPM))

              if newestRPM == -1:

                  logging.info("Newer locally: %s locally is newer than remote" % (latestevr,))
@@ -192,16 +200,15 @@ 

          importBuild(nvr, rpms, buildinfo, tag=tag)

          continue

      request = remotekojisession.getTaskRequest(task_id)

-     #localkojisession.build(request[0], request[1], opts=None, priority=2)

-         

-     fname = "%s.src.rpm" %  nvr

+     # localkojisession.build(request[0], request[1], opts=None, priority=2)

+ 

+     fname = "%s.src.rpm" % nvr

      fpath = "%s/%s.src.rpm" % (workpath, nvr)

      url = "%s/packages/%s/%s/%s/src/%s" % (PACKAGEURL, name, version, release, fname)

  

- 

      if not os.path.isfile(fpath):

-         file = grabber.urlopen(url, progress_obj = pg, text = "%s" % (fname))

-         out = os.open(fpath, os.O_WRONLY|os.O_CREAT|os.O_TRUNC, 0666)

+         file = grabber.urlopen(url, progress_obj=pg, text="%s" % (fname))

+         out = os.open(fpath, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0666)

          try:

              while 1:

                  buf = file.read(4096)
@@ -211,7 +218,7 @@ 

          finally:

              os.close(out)

              file.close()

-         

+ 

      serverdir = _unique_path('cli-build')

      localkojisession.uploadWrapper(fpath, serverdir, blocksize=65536)

      source = "%s/%s" % (serverdir, fname)
@@ -228,4 +235,3 @@ 

  

      localkojisession.build(source, target, opts=None, priority=2)

      logging.info("submitted build: %s" % nvr)

- 

@@ -12,6 +12,7 @@ 

  import os

  import argparse

  import logging

+ 

  log = logging.getLogger(__name__)

  import subprocess

  
@@ -57,6 +58,7 @@ 

          # second evr wins

          return -1

  

+ 

  koji_module = koji.get_profile_module(KOJIHUB)

  kojisession = koji_module.ClientSession(koji_module.config.server)

  kojisession.krb_login()

@@ -39,13 +39,13 @@ 

  cnt = 0

  

  print("reading content of %s tag ..." % (testing_tag))

- testing_builds = sorted(kojisession.listTagged(testing_tag), key = lambda pkg: pkg['package_name'])

+ testing_builds = sorted(kojisession.listTagged(testing_tag), key=lambda pkg: pkg['package_name'])

  for b in testing_builds:

      testing_nvrs.append(b['nvr'])

      testing_dict[b['nvr']] = b

  

  print("reading content of %s tag ..." % (ga_tag))

- ga_builds = sorted(kojisession.listTagged(ga_tag), key = lambda pkg: pkg['package_name'])

+ ga_builds = sorted(kojisession.listTagged(ga_tag), key=lambda pkg: pkg['package_name'])

  for b in ga_builds:

      ga_nvrs.append(b['nvr'])

  
@@ -54,8 +54,8 @@ 

  print("checking NVRs in both %s and %s tags ..." % (ga_tag, testing_tag))

  for b in testing_nvrs:

      if b in ga_nvrs:

- #	print("%s completed %s" % (b, testing_dict[b]['completion_time']))

- 	print("%s" % (b))

- 	cnt += 1

+         # print("%s completed %s" % (b, testing_dict[b]['completion_time']))

+         print("%s" % (b))

+         cnt += 1

  

  print("%s NVRs in both tags" % (cnt))

file modified
+120 -101
@@ -9,124 +9,143 @@ 

          Ariel Lima <alima@redhat.com> -- Red Hat Intern 2018 Summer

  """

  

- import requests#used to make requests to urls

- import argparse#we want to be able to take different inputs to format different urls

- import sys#only used to succesfully terminate script in case of error

- import re#we use this so we can easily manipulate the url

- import json#What we pull from the url will be in json format

+ import requests  # used to make requests to urls

+ import argparse  # we want to be able to take different inputs to format different urls

+ import sys  # only used to successfully terminate script in case of error

+ import re  # we use this so we can easily manipulate the url

+ import json  # What we pull from the url will be in json format

+ 

+ from bs4 import BeautifulSoup  # we will use beautiful soup to go through an html page in search of dead.package files

  

- from bs4 import BeautifulSoup#we will use beautiful soup to go though an html page in search of dead.package files

  """

- 	Parsing:

+     Parsing:

  

- 	nms: This is the namespace, not necessary default is "rpms"

- 	pck: This is the name of the fedora package, user has to input this, has no default value

- 	brc: This is the specific branch, not necessary default is "master"

+     nms: This is the namespace, not necessary default is "rpms"

+     pck: This is the name of the fedora package, user has to input this, has no default value

+     brc: This is the specific branch, not necessary default is "master"

  """

  parser = argparse.ArgumentParser()

- parser.add_argument("--nms", help="Name of the namespace that contains package", type=str)#namespace package is located in

- parser.add_argument("pck", help="Name of the fedora package",type=str)#package name

- parser.add_argument("--brc", help="Name of the branched version of the package wanted", type=str)#name of the branched version of package wanted

+ parser.add_argument("--nms", help="Name of the namespace that contains package",

+                     type=str)  # namespace package is located in

+ parser.add_argument("pck", help="Name of the fedora package", type=str)  # package name

+ parser.add_argument("--brc", help="Name of the branched version of the package wanted",

+                     type=str)  # name of the branched version of package wanted

  args = parser.parse_args()

  

- #this is the default url used for getting contributors the url is api/0/<namespace>/<package>

- contributors_url = ("https://src.fedoraproject.org/api/0/rpms/"+args.pck)

+ # this is the default url used for getting contributors the url is api/0/<namespace>/<package>

+ contributors_url = ("https://src.fedoraproject.org/api/0/rpms/" + args.pck)

  

- #this is the default url that we will use get the slas

- slas_url = "https://pdc.fedoraproject.org/rest_api/v1/component-branches/?global_component="+args.pck+"&name=master&type=rpm"

+ # this is the default url that we will use get the slas

+ slas_url = "https://pdc.fedoraproject.org/rest_api/v1/component-branches/?global_component=" + args.pck + "&name=master&type=rpm"

  

- #this url will be the default url used to check if a package is a dead package or not

- state_url = "https://src.fedoraproject.org/rpms/"+args.pck+"/tree/master"

+ # this url will be the default url used to check if a package is a dead package or not

+ state_url = "https://src.fedoraproject.org/rpms/" + args.pck + "/tree/master"

  

  """

- 	This is where the argument parsing will happen

+     This is where the argument parsing will happen

  """

  if args.nms:

- 	#case nms argument is used we want to modify the default namespace

- 	contributors_url = re.sub("/rpms/", ("/"+args.nms+"/"), contributors_url)#I added the forward slashes as a means to attempt to minimalize errors

- 	slas_url = re.sub("type=rpm", ("type="+args.nms), slas_url)#Includes 'type' as precaution to possible packagename issues

- 	state_url = re.sub("/rpms/", ("/"+args.nms+"/"), state_url)#When a user specifies a namespace that is not defult we change it

- 	print(contributors_url, slas_url)

+     # case nms argument is used we want to modify the default namespace

+     contributors_url = re.sub("/rpms/", ("/" + args.nms + "/"),

+                               contributors_url)  # I added the forward slashes as a means to attempt to minimize errors

+     slas_url = re.sub("type=rpm", ("type=" + args.nms),

+                       slas_url)  # Includes 'type' as precaution to possible packagename issues

+     state_url = re.sub("/rpms/", ("/" + args.nms + "/"),

+                        state_url)  # When a user specifies a namespace that is not defult we change it

+     print(contributors_url, slas_url)

  if args.brc:

- 	#case we want to change the branch we get the slas from (default is master)

- 	slas_url = re.sub("name=master", ("name="+args.brc), slas_url)#Includes 'name' as precaution to possible packagename issues

- 	state_url = re.sub("tree/master", ("tree/"+args.brc), state_url)#when a user specifies a branch that is not default we change it in the url

+     # case we want to change the branch we get the slas from (default is master)

+     slas_url = re.sub("name=master", ("name=" + args.brc),

+                       slas_url)  # Includes 'name' as precaution to possible packagename issues

+     state_url = re.sub("tree/master", ("tree/" + args.brc),

+                        state_url)  # when a user specifies a branch that is not default we change it in the url

+ 

  

  def package_contributors(url):

- 	"""

- 		This is a very simple method that will return the contributors of the package specified

- 	"""

- 	try:

- 		#This is really just to make sure that we got to the url we want

- 		#quit if there is any error

- 		response = requests.get(url)#here we have the extra step to ensure that we did not get an error (not converting straight to json)

- 		if(str(response)!="<Response [200]>"):

- 			sys.exit(0)

- 		response = response.json()

- 	except:

- 		print("ERROR: not able to find main page [package contributor method], could be due to wrong input or code update may be needed")

- 

- 	owner = response['access_users']['owner']#Current owner of this package (main_admin)

- 	admins = response['access_users']['admin']#current admins of this package in list format

- 	contributors = owner + admins#owner located at index 0, rest are admins

- 

- 	#we check to see whether it is an orphan package or not

- 	#then this is just basic outputting into a format I think looks good

- 	if(owner[0]=="orphan"):

- 		print("\n*THIS IS AN ORPHAN PACKAGE*")

- 	else:

- 		print("\nOWNER:\n-" + (contributors[0]))

- 

- 	#we check for admins, we could have this implemented into the previous if statement, I didn't because I am not fully aware of the standards for packages

- 	#we check for any admins, then format it in, case there is one

- 	if(len(admins)>=1):

- 		print("\nADMINS: ")

- 		for p in admins:

- 			print("-"+str(p))

- 

- 	return contributors#in case someone needs this for something else in the future we return the list of contributers, index 0 is owner

+     """

+         This is a very simple method that will return the contributors of the package specified

+     """

+     try:

+         # This is really just to make sure that we got to the url we want

+         # quit if there is any error

+         response = requests.get(

+             url)  # here we have the extra step to ensure that we did not get an error (not converting straight to json)

+         if (str(response) != "<Response [200]>"):

+             sys.exit(0)

+         response = response.json()

+     except:

+         print(

+             "ERROR: not able to find main page [package contributor method], could be due to wrong input or code update may be needed")

+ 

+     owner = response['access_users']['owner']  # Current owner of this package (main_admin)

+     admins = response['access_users']['admin']  # current admins of this package in list format

+     contributors = owner + admins  # owner located at index 0, rest are admins

+ 

+     # we check to see whether it is an orphan package or not

+     # then this is just basic outputting into a format I think looks good

+     if (owner[0] == "orphan"):

+         print("\n*THIS IS AN ORPHAN PACKAGE*")

+     else:

+         print("\nOWNER:\n-" + (contributors[0]))

+ 

+     # we check for admins, we could have this implemented into the previous if statement, I didn't because I am not fully aware of the standards for packages

+     # we check for any admins, then format it in, case there is one

+     if (len(admins) >= 1):

+         print("\nADMINS: ")

+         for p in admins:

+             print("-" + str(p))

+ 

+     return contributors  # in case someone needs this for something else in the future we return the list of contributors, index 0 is owner

+ 

  

  def package_slas(url):

- 	"""

- 		this returns the slas of a package

- 	"""

- 	try:

- 		#This is really just to make sure that we got to the url we want

- 		#quit if there is any error

- 		response = requests.get(url)#***here we have the extra step to ensure that we did not get an error (not converting straight to json)

- 		if(str(response)!="<Response [200]>"):

- 			sys.exit(0)

- 		response = response.json()#***here we finally convert it to json

- 	except:

- 		print("ERROR: not able to find SLA page [package_slas method], could be due to wrong input or code update may be needed")

- 

- 	response=response['results'][0]['slas']#here we specify very clearly what we want from the json object, response now becomes a list of dictionaries

- 	#From here down is just basic outputting into a format I think looks good

- 	print("\nSLAS--")

- 	for item in response[0]:

- 		print(str(item) + ":" + str(response[0][item]))

- 	print("\n")

+     """

+         this returns the slas of a package

+     """

+     try:

+         # This is really just to make sure that we got to the url we want

+         # quit if there is any error

+         response = requests.get(

+             url)  # ***here we have the extra step to ensure that we did not get an error (not converting straight to json)

+         if (str(response) != "<Response [200]>"):

+             sys.exit(0)

+         response = response.json()  # ***here we finally convert it to json

+     except:

+         print(

+             "ERROR: not able to find SLA page [package_slas method], could be due to wrong input or code update may be needed")

+ 

+     response = response['results'][0][

+         'slas']  # here we specify very clearly what we want from the json object, response now becomes a list of dictionaries

+     # From here down is just basic outputting into a format I think looks good

+     print("\nSLAS--")

+     for item in response[0]:

+         print(str(item) + ":" + str(response[0][item]))

+     print("\n")

+ 

  

  def package_state(url):

- 	"""

- 		This will simply check if the string 'dead.package' appears anywhere in the files section of this package

- 	"""

- 	try:

- 		#This is really just to make sure that we got to the url we want

- 		#quit if there is any error

- 		response = requests.get(url)

- 		if(str(response)!="<Response [200]>"):

- 			sys.exit(0)

- 		soup = BeautifulSoup(response.content, 'html.parser')#create a beautiful soup object, pretty much all I know

- 	except:

- 		print("ERROR: not able to find file url[package_state method], could be due to wrong input or code update may be needed")

- 

- 	soup = str(soup)#we will turn soup into a string object to facilitate searching for a sequence

- 

- 	if("dead.package" in soup):#search for dead.package sequence

- 		print("This package has a dead.package file\n")

- 	else:

- 		print("No dead.package file\n")

- package_contributors(contributors_url)#function call

- package_slas(slas_url)#function call

- package_state(state_url)#function call

+     """

+         This will simply check if the string 'dead.package' appears anywhere in the files section of this package

+     """

+     try:

+         # This is really just to make sure that we got to the url we want

+         # quit if there is any error

+         response = requests.get(url)

+         if (str(response) != "<Response [200]>"):

+             sys.exit(0)

+         soup = BeautifulSoup(response.content, 'html.parser')  # create a beautiful soup object, pretty much all I know

+     except:

+         print(

+             "ERROR: not able to find file url[package_state method], could be due to wrong input or code update may be needed")

+ 

+     soup = str(soup)  # we will turn soup into a string object to facilitate searching for a sequence

+ 

+     if ("dead.package" in soup):  # search for dead.package sequence

+         print("This package has a dead.package file\n")

+     else:

+         print("No dead.package file\n")

+ 

+ 

+ package_contributors(contributors_url)  # function call

+ package_slas(slas_url)  # function call

+ package_state(state_url)  # function call
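
Editor's note: another cleanup candidate this PR leaves alone: comparing str(response) against "<Response [200]>" depends on the repr of requests objects. The usual idiom checks status_code or lets raise_for_status() raise; a sketch with an illustrative URL:

    import requests

    response = requests.get("https://src.fedoraproject.org/api/0/rpms/bash")
    response.raise_for_status()  # raises requests.HTTPError on 4xx/5xx
    data = response.json()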

file modified
+24 -17
@@ -33,6 +33,7 @@ 

  domain = '@fedoraproject.org'

  smtpserver = 'localhost'

  

+ 

  def usage():

      print("""

      check-upgrade-paths.py tag1 [/]tag2 [[/]tag3 [/]tag4]
@@ -43,21 +44,24 @@ 

      dist-f8-updates dist-f8-updates-testing /dist-f9-updates dist-f9-updates-testing

      """)

  

+ 

  def compare(pkgA, pkgB):

      pkgdictA = koji.parse_NVR(pkgA)

      pkgdictB = koji.parse_NVR(pkgB)

  

      rc = rpm.labelCompare((pkgdictA['epoch'], pkgdictA['version'], pkgdictA['release']),

-                          (pkgdictB['epoch'], pkgdictB['version'], pkgdictB['release']))

+                           (pkgdictB['epoch'], pkgdictB['version'], pkgdictB['release']))

  

      return rc

  

+ 

  def buildToNvr(build):

      if build['epoch']:

          return '%s:%s' % (build['epoch'], build['nvr'])

      else:

          return build['nvr']

  

+ 

  def genPackageMail(builder, package):

      """Send a mail to the package watchers and the builder regarding the break.

         Mail is set out once per broken package."""
@@ -68,7 +72,7 @@ 

  To: %s

  Subject: Broken upgrade path(s) detected for: %s

  

- """ % (fromaddr, ','.join([addy+domain for addy in addresses]), package)

+ """ % (fromaddr, ','.join([addy + domain for addy in addresses]), package)

  

      for path in badpaths[pkg]:

          msg += "    %s\n" % path
@@ -81,10 +85,11 @@ 

      try:

          server = smtplib.SMTP(smtpserver)

          server.set_debuglevel(1)

-         server.sendmail(fromaddr, [addy+domain for addy in addresses], msg)

+         server.sendmail(fromaddr, [addy + domain for addy in addresses], msg)

      except:

          print('sending mail failed')

  

+ 

  if len(sys.argv) > 1 and sys.argv[1] in ['-h', '--help', '-help', '--usage']:

      usage()

      sys.exit(0)
@@ -104,13 +109,13 @@ 

  # Remove prepended slashes and make a dict of them

  tags = []

  for tag in cmdtags:

-   if tag[0] == '/':

-     realtag = tag[1:]

-     tags.append(realtag)

-     slashdict[realtag] = True

-   else:

-     tags.append(tag)

-     slashdict[tag] = False

+     if tag[0] == '/':

+         realtag = tag[1:]

+         tags.append(realtag)

+         slashdict[realtag] = True

+     else:

+         tags.append(tag)

+         slashdict[tag] = False

  

  # Use multicall to get the latest tagged builds from each tag

  kojisession.multicall = True
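
Editor's note: for context, koji's multicall pattern batches XML-RPC requests: once kojisession.multicall is set to True, method calls are queued locally and multiCall() ships them in a single round trip, returning one result per queued call. Roughly:

    kojisession.multicall = True
    for tag in tags:
        kojisession.listTagged(tag, latest=True)  # queued, no request sent yet
    results = kojisession.multiCall()             # one entry per queued call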
@@ -133,17 +138,17 @@ 

  # Loop through the packages, compare e:n-v-rs from the first tag upwards

  # then proceed to the next given tag and again compare upwards

  for pkg in pkgdict:

-     for tag in tags[:-1]: # Skip the last tag since there is nothing to compare it to

+     for tag in tags[:-1]:  # Skip the last tag since there is nothing to compare it to

          idx = tags.index(tag)

-         for nexttag in tags[idx+1:]: # Compare from current tag up

+         for nexttag in tags[idx + 1:]:  # Compare from current tag up

              if pkgdict[pkg].has_key(tag):

-                 if pkgdict[pkg].has_key(nexttag): # only compare if the next tag knows about this package

+                 if pkgdict[pkg].has_key(nexttag):  # only compare if the next tag knows about this package

                      rc = compare(pkgdict[pkg][tag]['nvr'], pkgdict[pkg][nexttag]['nvr'])

                      if rc <= 0:

                          continue

-                     if rc > 0 and tags.index(nexttag) == idx+1 and slashdict[nexttag] and idx+2 < len(tags):

+                     if rc > 0 and tags.index(nexttag) == idx + 1 and slashdict[nexttag] and idx + 2 < len(tags):

                          # Broken? Need to check the next tag!

-                         nextnexttag = tags[idx+2]

+                         nextnexttag = tags[idx + 2]

                          if pkgdict[pkg].has_key(nextnexttag):

                              rc = compare(pkgdict[pkg][tag]['nvr'], pkgdict[pkg][nextnexttag]['nvr'])

                      if rc > 0:
@@ -154,8 +159,10 @@ 

                              badpathsbybuilder[pkgdict[pkg][tag]['builder']] = {}

                          if not badpathsbybuilder[pkgdict[pkg][tag]['builder']].has_key(pkg):

                              badpathsbybuilder[pkgdict[pkg][tag]['builder']][pkg] = []

-                         badpaths[pkg].append('%s > %s (%s %s)' % (tag, nexttag, pkgdict[pkg][tag]['nvr'], pkgdict[pkg][nexttag]['nvr']))

-                         badpathsbybuilder[pkgdict[pkg][tag]['builder']][pkg].append('%s > %s (%s %s)' % (tag, nexttag, pkgdict[pkg][tag]['nvr'], pkgdict[pkg][nexttag]['nvr']))

+                         badpaths[pkg].append(

+                             '%s > %s (%s %s)' % (tag, nexttag, pkgdict[pkg][tag]['nvr'], pkgdict[pkg][nexttag]['nvr']))

+                         badpathsbybuilder[pkgdict[pkg][tag]['builder']][pkg].append(

+                             '%s > %s (%s %s)' % (tag, nexttag, pkgdict[pkg][tag]['nvr'], pkgdict[pkg][nexttag]['nvr']))

  

  msg = """From: %s

  To: %s

file modified
+26 -18
@@ -18,12 +18,14 @@ 

  

  # HAAACK

  import imp

- sys.modules['repoclosure'] = imp.load_source("repoclosure","/usr/bin/repoclosure")

+ 

+ sys.modules['repoclosure'] = imp.load_source("repoclosure", "/usr/bin/repoclosure")

  import repoclosure

  

  owners = {}

  deps = {}

  

+ 

  def generateConfig(distdir, treename, arch, testing=False):

      if not os.path.exists(os.path.join(distdir, arch)):

          return None
@@ -93,9 +95,10 @@ 

  

  def libmunge(match):

      if match.groups()[1].isdigit():

-         return "%s%d" % (match.groups()[0],int(match.groups()[1])+1)

+         return "%s%d" % (match.groups()[0], int(match.groups()[1]) + 1)

      else:

-         return "%s%s" % (match.groups()[0],match.groups()[1])

+         return "%s%s" % (match.groups()[0], match.groups()[1])

+ 

  

  def addOwner(list, pkg):

      if list.get(pkg):
@@ -110,15 +113,17 @@ 

          return True

      return False

  

+ 

  def getSrcPkg(pkg):

      if pkg.arch == 'src':

-       return pkg.name

+         return pkg.name

      srpm = pkg.returnSimple('sourcerpm')

      if not srpm:

          return None

-     srcpkg = string.join(srpm.split('-')[:-2],'-')

+     srcpkg = string.join(srpm.split('-')[:-2], '-')

      return srcpkg

  

+ 

  def printableReq(pkg, dep):

      (n, f, v) = dep

      req = '%s' % n
@@ -129,17 +134,18 @@ 

          req = '%s %s' % (req, v)

      return "%s requires %s" % (pkg, req,)

  

+ 

  def assignBlame(resolver, dep, guilty):

      def __addpackages(sack):

          for package in sack.returnPackages():

              p = getSrcPkg(package)

              if addOwner(guilty, p):

                  list.append(p)

-     

+ 

      # Given a dep, find potential responsible parties

  

      list = []

-     

+ 

      # The dep itself

      list.append(dep)

  
@@ -147,16 +153,16 @@ 

      __addpackages(resolver.whatProvides(dep, None, None))

  

      # Libraries: check for variant in soname

-     if re.match("lib.*\.so\.[0-9]+",dep):

-         new = re.sub("(lib.*\.so\.)([0-9]+)",libmunge,dep)

+     if re.match("lib.*\.so\.[0-9]+", dep):

+         new = re.sub("(lib.*\.so\.)([0-9]+)", libmunge, dep)

          __addpackages(resolver.whatProvides(new, None, None))

          libname = dep.split('.')[0]

          __addpackages(resolver.whatProvides(libname, None, None))

  

      return list

  

- def generateSpam(pkgname, treename, sendmail = True):

  

+ def generateSpam(pkgname, treename, sendmail=True):

      package = deps[pkgname]

      guilty = owners[pkgname]

      conspirators = []
@@ -186,10 +192,10 @@ 

              data = data + "On %s:\n" % (arch)

              brokendeps = subpackage[arch]

              for dep in brokendeps:

-                 data = data + "\t%s\n" % printableReq(dep[0],dep[1])

+                 data = data + "\t%s\n" % printableReq(dep[0], dep[1])

  

      data = data + "Please resolve this as soon as possible.\n\n"

-     

+ 

      fromaddr = 'buildsys@fedoraproject.org'

      toaddrs = [guilty]

      if conspirators:
@@ -201,7 +207,7 @@ 

  Subject: Broken dependencies: %s

  

  %s

- """ % (fromaddr, guilty, string.join(conspirators,','), pkgname, data)

+ """ % (fromaddr, guilty, string.join(conspirators, ','), pkgname, data)

      if sendmail:

          try:

              server = smtplib.SMTP('localhost')
@@ -210,6 +216,7 @@ 

          except:

              print('sending mail failed')

  

+ 

  def doit(dir, treename, mail=True, testing=False):

      for arch in os.listdir(dir):

          conffile = generateConfig(dir, treename, arch, testing)
@@ -223,11 +230,11 @@ 

              carch = 'sparc64v'

          else:

              carch = arch

-         my = repoclosure.RepoClosure(config = conffile, arch = [carch])

+         my = repoclosure.RepoClosure(config=conffile, arch=[carch])

          cachedir = getCacheDir()

          my.repos.setCacheDir(cachedir)

          my.readMetadata()

-         baddeps = my.getBrokenDeps(newest = False)

+         baddeps = my.getBrokenDeps(newest=False)

          pkgs = baddeps.keys()

          tmplist = [(x.returnSimple('name'), x) for x in pkgs]

          tmplist.sort()
@@ -256,21 +263,22 @@ 

  

                  blamelist = assignBlame(my, n, owners)

  

-                 broken.append( (pkg, (n, f, v), blamelist) )

+                 broken.append((pkg, (n, f, v), blamelist))

  

              deps[srcpkg][pkgid][arch] = broken

  

          print("\n\n")

          os.unlink(conffile)

-         shutil.rmtree(cachedir, ignore_errors = True)

+         shutil.rmtree(cachedir, ignore_errors=True)

  

      pkglist = deps.keys()

      for pkg in pkglist:

          generateSpam(pkg, treename, mail)

  

+ 

  if __name__ == '__main__':

  

-     parser = argparse.ArgumentParser(usage = '%(prog)s [options] <directory>')

+     parser = argparse.ArgumentParser(usage='%(prog)s [options] <directory>')

      parser.add_argument("--nomail", action="store_true")

      parser.add_argument("--enable-testing", action="store_true")

      parser.add_argument("--treename", default="rawhide")

file modified
+8 -2
@@ -14,30 +14,35 @@ 

  import rpm

  import sys

  

+ 

  def usage():

      print("""

      clean-overrides.py overridetag updatetag

      """)

  

+ 

  def compare(pkgA, pkgB):

      pkgdictA = koji.parse_NVR(pkgA)

      pkgdictB = koji.parse_NVR(pkgB)

  

      rc = rpm.labelCompare((pkgdictA['epoch'], pkgdictA['version'], pkgdictA['release']),

-                          (pkgdictB['epoch'], pkgdictB['version'], pkgdictB['release']))

+                           (pkgdictB['epoch'], pkgdictB['version'], pkgdictB['release']))

  

      return rc

  

+ 

  def buildToNvr(build):

      if build['epoch']:

          return '%s:%s' % (build['epoch'], build['nvr'])

      else:

          return build['nvr']

  

+ 

  def printBuild(build):

      pkgdict = koji.parse_NVR(build)

      return '%s-%s-%s' % (pkgdict['name'], pkgdict['version'], pkgdict['release'])

  

+ 

  if len(sys.argv) > 1 and sys.argv[1] in ['-h', '--help', '-help', '--usage']:

      usage()

      sys.exit(0)
@@ -80,4 +85,5 @@ 

      print("")

  

  if equal or older:

-     print("Suggest: koji untag-pkg %s %s %s" % (overtag, ' '.join([printBuild(e) for e in equal]), ' '.join([printBuild(o) for o in older])))

+     print("Suggest: koji untag-pkg %s %s %s" % (

+         overtag, ' '.join([printBuild(e) for e in equal]), ' '.join([printBuild(o) for o in older])))

file modified
+43 -33
@@ -14,23 +14,26 @@ 

  from tempfile import mkdtemp

  import dnf

  

+ 

  class SackError(Exception):

      pass

  

+ 

  major_version = sys.version_info[0]

  

  # Set some constants

  # Old definition

- #critpath_groups = ['@core','@critical-path-base','@critical-path-gnome']

+ # critpath_groups = ['@core','@critical-path-base','@critical-path-gnome']

  critpath_groups = [

      '@core', '@critical-path-apps', '@critical-path-base',

      '@critical-path-gnome', '@critical-path-kde', '@critical-path-lxde',

      '@critical-path-xfce'

  ]

- primary_arches=('armhfp', 'x86_64')

- alternate_arches=('i386','aarch64','ppc64','ppc64le','s390x')

+ primary_arches = ('armhfp', 'x86_64')

+ alternate_arches = ('i386', 'aarch64', 'ppc64', 'ppc64le', 's390x')

  # There is not current a programmatic way to generate this list

- fakearch = {'i386':'i686', 'x86_64':'x86_64', 'ppc64':'ppc64', 'ppc':'ppc64', 'armhfp':'armv7hl', 'aarch64':'aarch64', 'ppc64le':'ppc64', 's390x':'s390x'}

+ fakearch = {'i386': 'i686', 'x86_64': 'x86_64', 'ppc64': 'ppc64', 'ppc': 'ppc64', 'armhfp': 'armv7hl',

+             'aarch64': 'aarch64', 'ppc64le': 'ppc64', 's390x': 's390x'}

  fedora_baseurl = 'http://dl.fedoraproject.org/pub/fedora/linux/'

  fedora_alternateurl = 'http://dl.fedoraproject.org/pub/fedora-secondary/'

  releasepath = {
@@ -42,7 +45,7 @@ 

      'rawhide': ''

  }

  

- for x in range(12,27,1):

+ for x in range(12, 27, 1):

      r = str(x)

      releasepath[r] = 'releases/%s/Everything/$basearch/os/' % r

      updatepath[r] = 'updates/%s/$basearch/' % r
@@ -53,12 +56,16 @@ 

  updatepath['branched'] = ''

  

  # blacklists

- blacklist = [ 'tzdata' ]

+ blacklist = ['tzdata']

+ 

  

  def get_source(pkg):

-     return pkg.rsplit('-',2)[0]

+     return pkg.rsplit('-', 2)[0]

+ 

  

  provides_cache = {}

+ 

+ 

  def resolve_deps(pkg, base):

      deps = []

      for prov in pkg.provides:
@@ -71,7 +78,7 @@ 

              po = base.returnPackageByDep(req)

          except yum.Errors.YumBaseError:

              print("ERROR: unresolved dep for %s of pkg %s" % (req[0],

-                   pkg.name))

+                                                               pkg.name))

              raise

          provides_cache[req] = po.name

          deps.append(po.name)
@@ -80,6 +87,7 @@ 

  

      return deps

  

+ 

  def expand_yum_critpath(my, start_list):

      name_list = []

      # Expand the start_list to a list of names
@@ -139,18 +147,20 @@ 

      my.conf.installroot = cachedir

      my.repos.disableRepo('*')

      if "/mnt/koji/compose/" not in args.url:

-         my.add_enable_repo('critpath-repo-%s' % arch, baseurls=[url+releasepath[release]])

-         print("adding critpath-repo-%s at %s" % (arch, url+releasepath[release]))

+         my.add_enable_repo('critpath-repo-%s' % arch, baseurls=[url + releasepath[release]])

+         print("adding critpath-repo-%s at %s" % (arch, url + releasepath[release]))

          if updatepath[release]:

-             my.add_enable_repo('critpath-repo-updates-%s' % arch, baseurls=[url+updatepath[release]])

+             my.add_enable_repo('critpath-repo-updates-%s' % arch, baseurls=[url + updatepath[release]])

      else:

-         my.add_enable_repo('critpath-repo-%s' % arch, baseurls=[url+'/$basearch/os/'])

-         print("adding critpath-repo-%s at %s" % (arch, url+'/$basearch/os/'))

+         my.add_enable_repo('critpath-repo-%s' % arch, baseurls=[url + '/$basearch/os/'])

+         print("adding critpath-repo-%s at %s" % (arch, url + '/$basearch/os/'))

      return (my, cachedir)

  

+ 

  def nvr(p):

      return '-'.join([p.name, p.ver, p.rel])

  

+ 

  def expand_dnf_critpath(release):

      print("Resolving %s dependencies with DNF" % arch)

      base = dnf.Base()
@@ -189,7 +199,7 @@ 

  

              # load up the comps data from configured repositories

              base.read_comps()

-             group = group.replace('@','')

+             group = group.replace('@', '')

              base.group_install(group, ['mandatory', 'default', 'optional'], strict=False)

              # resolve the groups marked in base object

              base.resolve()
@@ -197,7 +207,6 @@ 

  

          return packages

  

- 

      except Exception as ex:

          template = "An exception of type {0} occurred. Arguments:\n{1!r}"

          message = template.format(type(ex).__name__, ex.args)
@@ -210,6 +219,7 @@ 

          shutil.rmtree(temp_cache_dir)

          shutil.rmtree(temp_install_root)

  

+ 

  def solves_with_dnf(release_version):

      if release_version == 'branched':

          return True
@@ -226,25 +236,25 @@ 

  if __name__ == '__main__':

      # Option parsing

      releases = sorted(releasepath.keys())

-     parser = argparse.ArgumentParser(usage = "%%(prog)s [options] [%s]" % '|'.join(releases))

+     parser = argparse.ArgumentParser(usage="%%(prog)s [options] [%s]" % '|'.join(releases))

      parser.add_argument("--nvr", action='store_true', default=False,

-                       help="output full NVR instead of just package name")

+                         help="output full NVR instead of just package name")

      parser.add_argument("-a", "--arches", default=','.join(primary_arches),

-                       help="Primary arches to evaluate (%(default)s)")

+                         help="Primary arches to evaluate (%(default)s)")

      parser.add_argument("-s", "--altarches", default=','.join(alternate_arches),

-                       help="Alternate arches to evaluate (%(default)s)")

+                         help="Alternate arches to evaluate (%(default)s)")

      parser.add_argument("-o", "--output", default="critpath.txt",

-                       help="name of file to write critpath list (%(default)s)")

+                         help="name of file to write critpath list (%(default)s)")

      parser.add_argument("-u", "--url", default=fedora_baseurl,

-                       help="URL to Primary repos")

+                         help="URL to Primary repos")

      parser.add_argument("-r", "--alturl", default=fedora_alternateurl,

-                       help="URL to Alternate repos")

+                         help="URL to Alternate repos")

      parser.add_argument("--srpm", action='store_true', default=False,

-                       help="Output source RPMS instead of binary RPMS (for pkgdb)")

+                         help="Output source RPMS instead of binary RPMS (for pkgdb)")

      parser.add_argument("--noaltarch", action='store_true', default=False,

-                       help="Not to run for alternate architectures")

+                         help="Not to run for alternate architectures")

      parser.add_argument("--dnf", action='store_true', default=False,

-                       help="Use DNF for dependency solving")

+                         help="Use DNF for dependency solving")

      args, extras = parser.parse_known_args()

  

      # Input & Sanity Validation
@@ -258,12 +268,13 @@ 

      package_count = 0

  

      using_dnf = False

-     if (args.dnf == True) or (major_version >= 3) or solves_with_dnf(release):

+     if (args.dnf is True) or (major_version >= 3) or solves_with_dnf(release):

          using_dnf = True

  

      if not using_dnf:

          import yum

          from rpmUtils.arch import getBaseArch

+ 

          if yum.__version_info__ < (3, 2, 24) and args.arches != getBaseArch():

              print("WARNING: yum < 3.2.24 may be unable to depsolve other arches.")

              print("Get a newer yum or run this on an actual %s system." % args.arches)
@@ -274,22 +285,21 @@ 

              print("This script requires the DNF version 2.0 API.")

              sys.exit(1)

  

- 

      if args.nvr and args.srpm:

          print("ERROR: --nvr and --srpm are mutually exclusive")

          sys.exit(1)

  

      if args.url != fedora_baseurl and "/mnt/koji/compose/" not in args.url:

-         releasepath[release] = releasepath[release].replace('development/','')

+         releasepath[release] = releasepath[release].replace('development/', '')

          print("Using Base URL %s" % (args.url + releasepath[release]))

      else:

          print("Using Base URL %s" % (args.url))

  

      # Do the critpath expansion for each arch

      critpath = set()

-     for arch in check_arches+alternate_check_arches:

+     for arch in check_arches + alternate_check_arches:

          if arch in check_arches:

-             url=args.url

+             url = args.url

          elif arch in alternate_check_arches:

              if args.noaltarch:

                  continue
@@ -309,7 +319,7 @@ 

              pkgs = expand_dnf_critpath(release)

          else:

              print("Resolving %s dependencies with YUM" % arch)

-             (my, cachedir) = setup_yum(url = url, release=release, arch=arch)

+             (my, cachedir) = setup_yum(url=url, release=release, arch=arch)

              pkgs = expand_yum_critpath(my, critpath_groups)

  

          if pkgs is None:
@@ -336,11 +346,11 @@ 

                  shutil.rmtree(cachedir)

          print()

      # Write full list

-     f = open(args.output,"wb")

+     f = open(args.output, "wb")

      for packagename in sorted(critpath):

          f.write(packagename + b'\n')

      f.close()

-     if critpath == None:

+     if critpath is None:

          package_count = 0

      else:

          package_count = len(critpath)
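
A side note on the output handling above: the critpath list is opened in binary mode, so every entry written must already be bytes. A minimal sketch of the convention (package names invented):

    # entries are bytes because the file is opened "wb"
    with open("critpath.txt", "wb") as f:
        for packagename in sorted([b"bash", b"glibc"]):
            f.write(packagename + b"\n")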

file modified
+27 -17
@@ -8,8 +8,11 @@ 

  # Bencode parsing code from http://effbot.org/zone/bencode.htm

  

  from __future__ import print_function

- import sys, re, time, os

+ import os

  import re

+ import sys

+ import time

+ 

  

  def tokenize(text, match=re.compile("([idel])|(\d+):|(-?\d+)").match):

      i = 0
@@ -19,11 +22,12 @@ 

          i = m.end()

          if m.lastindex == 2:

              yield "s"

-             yield text[i:i+int(s)]

+             yield text[i:i + int(s)]

              i = i + int(s)

          else:

              yield s

  

+ 

  def decode_item(next, token):

      if token == "i":

          # integer: "i" value "e"
@@ -46,16 +50,18 @@ 

          raise ValueError

      return data

  

+ 

  def decode(text):

      try:

          src = tokenize(text)

          data = decode_item(src.next, src.next())

-         for token in src: # look for more tokens

+         for token in src:  # look for more tokens

              raise SyntaxError("trailing junk")

      except (AttributeError, ValueError, StopIteration):

          raise SyntaxError("syntax error")

      return data

  

+ 

  def main(argv):

      if len(argv) < 2:

          print("Usage: %s <group> <date>" % (argv[0]))
@@ -65,7 +71,8 @@ 

          date = argv[2]

      else:

          date = time.strftime("%Y-%m-%d")

-     genini(sys.stdout, ".", group,  date)

+     genini(sys.stdout, ".", group, date)

+ 

  

  def SIprefix(n):

      prefix = ["", "k", "M", "G", "T"]
@@ -75,32 +82,35 @@ 

          x = "%.1f" % (n)

          prefix.pop(0)

      return "%s%sB" % (x, prefix[0])

-    

+ 

+ 

  def torrentsize(filename):

      torrentdict = decode(open(filename).read())

      length = sum(y["length"] for y in torrentdict["info"]["files"])

-     return SIprefix(length) 

+     return SIprefix(length)

  

- def genini(output, path, group,  date):

+ 

+ def genini(output, path, group, date):

      for dirpath, dirnames, filenames in os.walk(path):

-     	dirnames.sort()

-     	filenames.sort()

-     	for f in filenames:

+         dirnames.sort()

+         filenames.sort()

+         for f in filenames:

              if not f.endswith(".torrent"):

-             	continue

- 	    filepath = os.path.join(dirpath, f)

+                 continue

+             filepath = os.path.join(dirpath, f)

              displaypath = filepath

              if displaypath.startswith(dirpath):

                  displaypath = displaypath[len(dirpath):]

              if displaypath.startswith("/"):

                  displaypath = displaypath[1:]

- 	    size = torrentsize(filepath)

- 	    output.write("[%s]\n" % (displaypath))

- 	    output.write("description=%s\n" % (f[:-8].replace("-", " ")))

+             size = torrentsize(filepath)

+             output.write("[%s]\n" % (displaypath))

+             output.write("description=%s\n" % (f[:-8].replace("-", " ")))

              output.write("size=%s\n" % (size))

- 	    output.write("releasedate=%s\n" % (date))

+             output.write("releasedate=%s\n" % (date))

              output.write("group=%s\n" % (group))

-     	    output.write("\n")

+             output.write("\n")

+ 

  

  if __name__ == "__main__":

      main(sys.argv)
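
For reference, a quick sketch of driving the decode() helper above (the sample bencode string is invented; the list/dict branches of decode_item follow the effbot recipe cited at the top of the file, and decode() relies on the Python 2 generator .next attribute):

    # "d4:spaml1:a1:bee" encodes {"spam": ["a", "b"]}
    data = decode("d4:spaml1:a1:bee")
    assert data == {"spam": ["a", "b"]}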

file modified
+3 -3
@@ -34,10 +34,10 @@ 

                  for pkg in kojisession.listRPMs(componentBuildrootID=rootid['id']):

                      if pkg['name'] == 'binutils':

                          if pkg['version'] == '2.17.50.0.16':

-                             if not build in needbuild:

+                             if build not in needbuild:

                                  needbuild.append(build)

                          elif pkg['version'] == '2.17.50.0.17' and pkg['release'] < '7':

-                             if not build in needbuild:

+                             if build not in needbuild:

                                  needbuild.append(build)

                          else:

                              print("%s had binutils, but it was %s" % (build['nvr'], pkg['nvr']))
@@ -46,7 +46,7 @@ 

  for build in needbuild:

      for rpm in kojisession.listBuildRPMs(build['nvr']):

          if rpm['arch'] == 'ppc':

-             if not build in reallyneedbuild:

+             if build not in reallyneedbuild:

                  reallyneedbuild.append(build)

                  rebuildnames.append(build['name'])

  

file modified
+31 -20
@@ -12,10 +12,12 @@ 

  import xmlrpclib

  from argparse import ArgumentParser

  

+ 

  def _(args):

      """Stub function for translation"""

      return args

  

+ 

  def ensure_connection(session):

      try:

          ret = session.getAPIVersion()
@@ -25,12 +27,14 @@ 

          print(_("WARNING: The server is at API version %d and the client is at %d" % (ret, koji.API_VERSION)))

      return True

  

+ 

  def error(msg=None, code=1):

      if msg:

          sys.stderr.write(msg + "\n")

          sys.stderr.flush()

      sys.exit(code)

  

+ 

  def compare_pkgs(pkg1, pkg2):

      """Helper function to compare two package versions

           return 1 if a > b
@@ -46,6 +50,7 @@ 

      r2 = str(pkg2['release'])

      return rpm.labelCompare((e1, v1, r1), (e2, v2, r2))

  

+ 

  def diff_changelogs(session, pkg1, pkg2):

      cl2 = session.getChangelogEntries(pkg2['build_id'])

      for x in session.getChangelogEntries(pkg1['build_id']):
@@ -54,7 +59,8 @@ 

          except ValueError:

              pass

      return cl2

-     #return session.getChangelogEntries(pkg2['build_id'], after=pkg1['completion_time'])

+     # return session.getChangelogEntries(pkg2['build_id'], after=pkg1['completion_time'])

+ 

  

  def print_hidden_packages(session, tag, opts, pkg_list=None):

      """Find and print the "hidden" packages of the given tag"""
@@ -83,7 +89,7 @@ 

          print("\nComparing %s (%d) to the following tags:" % (tag['name'], tag['id']))

          for ct in comp_tags:

              try:

-                 print("%s%s (%d)" % (" "*ct.get('currdepth',0), ct['name'], ct[ctag_id_key]))

+                 print("%s%s (%d)" % (" " * ct.get('currdepth', 0), ct['name'], ct[ctag_id_key]))

              except KeyError:

                  pass

  
@@ -91,8 +97,8 @@ 

          print("\nBuilding package lists:")

  

      # Build {package_name: pkg} list for all our tags

-     main_latest = {}    #latest by nvr

-     main_top = {}       #latest by tag ordering

+     main_latest = {}  # latest by nvr

+     main_top = {}  # latest by tag ordering

      if opts['verbose']:

          print("%s ..." % tag['name'])

      tagged_pkgs = session.listTagged(tag['id'], latest=True)
@@ -106,8 +112,8 @@ 

              continue

          main_latest[pkg['package_name']] = pkg

  

-     comp_latest = {}    #latest by nvr

-     comp_top = {}       #latest by tag ordering

+     comp_latest = {}  # latest by nvr

+     comp_top = {}  # latest by tag ordering

      for ctag in comp_tags:

          if opts['verbose']:

              print("%s ..." % ctag['name'])
@@ -120,17 +126,18 @@ 

              if pkg_list and not pkg['package_name'] in pkg_list:

                  continue

              comp_top[ctag['name']].setdefault(pkg['package_name'], pkg)

-             if comp_latest[ctag['name']].has_key(pkg['package_name']) and (compare_pkgs(pkg, comp_latest[ctag['name']][pkg['package_name']]) == -1):

+             if comp_latest[ctag['name']].has_key(pkg['package_name']) and (

+                     compare_pkgs(pkg, comp_latest[ctag['name']][pkg['package_name']]) == -1):

                  continue

              comp_latest[ctag['name']][pkg['package_name']] = pkg

  

      # Check for invalid packages

      if pkg_list and opts['verbose']:

          for pkg in pkg_list:

-             if not pkg in main_latest:

+             if pkg not in main_latest:

                  print("%s is not a valid package in tag %s" % (pkg, tag['name']))

              for ctag in comp_latest.keys():

-                 if not pkg in comp_latest[ctag]:

+                 if pkg not in comp_latest[ctag]:

                      print("%s is not a valid package in tag %s" % (pkg, ctag))

  

      if main_latest:
@@ -140,7 +147,7 @@ 

              if opts['verbose']:

                  print("\nComparing packages within %s:" % tag['name'])

              for pkg in keys:

-                 #compare latest by tag order to latest by nvr (within original tag)

+                 # compare latest by tag order to latest by nvr (within original tag)

                  if opts['debug']:

                      print("comparing %s to %s (%s)" % (main_latest[pkg], main_top[pkg], tag['name']))

                  if opts['reverse']:
@@ -161,13 +168,16 @@ 

              for ctag in comp_latest.keys():

                  if comp_latest[ctag].has_key(pkg):

                      if opts['debug']:

-                         print("comparing %s (%s) to %s (%s)" % (comp_latest[ctag][pkg]['nvr'], ctag, main_latest[pkg]['nvr'], tag['name']))

+                         print("comparing %s (%s) to %s (%s)" % (

+                             comp_latest[ctag][pkg]['nvr'], ctag, main_latest[pkg]['nvr'], tag['name']))

                      if opts['reverse']:

                          if (compare_pkgs(main_latest[pkg], comp_latest[ctag][pkg]) == 1):

-                             print("%s (%s) < %s (%s)" % (comp_latest[ctag][pkg]['nvr'], ctag, main_latest[pkg]['nvr'], tag['name']))

+                             print("%s (%s) < %s (%s)" % (

+                                 comp_latest[ctag][pkg]['nvr'], ctag, main_latest[pkg]['nvr'], tag['name']))

                      else:

                          if (compare_pkgs(main_latest[pkg], comp_latest[ctag][pkg]) == -1):

-                             print("%s (%s) > %s (%s)" % (comp_latest[ctag][pkg]['nvr'], ctag, main_latest[pkg]['nvr'], tag['name']))

+                             print("%s (%s) > %s (%s)" % (

+                                 comp_latest[ctag][pkg]['nvr'], ctag, main_latest[pkg]['nvr'], tag['name']))

                              if opts['changelogs']:

                                  for cl in diff_changelogs(session, main_latest[pkg], comp_latest[ctag][pkg]):

                                      print("%(date)s - %(author)s\n%(text)s\n" % cl)
@@ -176,15 +186,16 @@ 

          if opts['verbose']:

              print("Oops, no packages to compare in the main tag (%s)" % tag['name'])

  

+ 

  if __name__ == "__main__":

      usage = _("find-hidden-packages [options] tag <pkg> [<pkg>...]")

-     #usage += _("\n(Specify the --help global option for a list of other help options)")

+     # usage += _("\n(Specify the --help global option for a list of other help options)")

      parser = ArgumentParser(usage=usage)

      parser.add_argument("-v", "--verbose", action="store_true", help=_("Be verbose"))

      parser.add_argument("-d", "--debug", action="store_true", default=False,

-                       help=_("Show debugging output"))

+                         help=_("Show debugging output"))

      parser.add_argument("-s", "--server", default="http://koji.fedoraproject.org/kojihub",

-                       help=_("Url of koji XMLRPC server"))

+                         help=_("URL of koji XMLRPC server"))

      parser.add_argument("-p", "--parent", help=_("Compare against a single parent"))

      parser.add_argument("--reverse", action="store_true", help=_("Process tag's children instead of its parents"))

      parser.add_argument("--changelogs", action="store_true", help=_("Print the differing changelog entries"))
@@ -192,7 +203,7 @@ 

      parser.add_argument("--stop", help=_("Stop processing inheritance at this tag"))

      parser.add_argument("--jump", help=_("Jump from one tag to another when processing inheritance"))

  

-     args, extras  = parser.parse_known_args()

+     args, extras = parser.parse_known_args()

  

      # parse arguments

      opts = {}
@@ -210,7 +221,7 @@ 

  

      # setup server connection

      session_opts = {'debug': opts['debug']}

-     kojihub = koji.ClientSession(args.server,session_opts)

+     kojihub = koji.ClientSession(args.server, session_opts)

  

      # just quick sanity check on the args before we connect to the server

      if len(extras) < 1:
@@ -221,7 +232,7 @@ 

          ensure_connection(kojihub)

          if args.debug:

              print("Successfully connected to hub")

-     except (KeyboardInterrupt,SystemExit):

+     except (KeyboardInterrupt, SystemExit):

          pass

      except:

          if args.debug:
@@ -263,6 +274,6 @@ 

      rv = 0

      try:

          rv = print_hidden_packages(kojihub, tag, opts, pkgs)

-     except (KeyboardInterrupt,SystemExit):

+     except (KeyboardInterrupt, SystemExit):

          pass

      sys.exit(rv)
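
To make the compare_pkgs() contract above concrete, a small illustration of rpm.labelCompare with invented EVR tuples (epoch, version, and release are passed as strings, exactly as compare_pkgs() builds them):

    import rpm
    assert rpm.labelCompare(('0', '1.2', '3.fc30'), ('0', '1.2', '2.fc30')) == 1  # first is newer
    assert rpm.labelCompare(('0', '1.2', '1.fc30'), ('0', '1.2', '1.fc30')) == 0  # same EVR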

file modified
+7 -7
@@ -21,14 +21,13 @@ 

  from requests.adapters import HTTPAdapter

  from requests.packages.urllib3.util.retry import Retry

  

- 

  # Set some variables

  # Some of these could arguably be passed in as args.

- buildtag = 'f30-rebuild' # tag to check

- desttag = 'f30' # Tag where fixed builds go

- epoch = '2019-01-31 10:10:00.000000' # Date to check for failures from

- failures = {} # dict of owners to lists of packages that failed.

- failed = [] # raw list of failed packages

+ buildtag = 'f30-rebuild'  # tag to check

+ desttag = 'f30'  # Tag where fixed builds go

+ epoch = '2019-01-31 10:10:00.000000'  # Date to check for failures from

+ failures = {}  # dict of owners to lists of packages that failed.

+ failed = []  # raw list of failed packages

  ownerdataurl = 'https://src.fedoraproject.org/extras/pagure_owner_alias.json'

  

  
@@ -85,7 +84,8 @@ 

      # Check if newer build exists for package

      failbuilds = []

      for build in failtasks:

-         if ((not build['package_id'] in [goodbuild['package_id'] for goodbuild in goodbuilds]) and (not build['package_id'] in [pkg['package_id'] for pkg in pkgs])):

+         if (build['package_id'] not in [goodbuild['package_id'] for goodbuild in goodbuilds] and

+                 build['package_id'] not in [pkg['package_id'] for pkg in pkgs]):

              failbuilds.append(build)

  

      # Generate taskinfo for each failed build
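
An aside on the membership tests in the failbuilds loop above: the id lists are rebuilt on every iteration. A set-based sketch with the same semantics (data shapes assumed from the surrounding code):

    good_ids = set(goodbuild['package_id'] for goodbuild in goodbuilds)
    pkg_ids = set(pkg['package_id'] for pkg in pkgs)
    failbuilds = [build for build in failtasks
                  if build['package_id'] not in good_ids
                  and build['package_id'] not in pkg_ids]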

@@ -32,11 +32,11 @@ 

  

  try:

      import texttable

+ 

      with_table = True

  except ImportError:

      with_table = False

  

- 

  cache = dogpile.cache.make_region().configure(

      'dogpile.cache.dbm',

      expiration_time=86400,
@@ -46,12 +46,11 @@ 

  PAGURE_URL = 'https://src.fedoraproject.org'

  PAGURE_MAX_ENTRIES_PER_PAGE = 100

  

- 

  EPEL6_RELEASE = dict(

      repo='https://kojipkgs.fedoraproject.org/mash/updates/dist-6E-epel/'

-     'x86_64/',

+          'x86_64/',

      source_repo='https://kojipkgs.fedoraproject.org/mash/updates/'

-     'dist-6E-epel/SRPMS',

+                 'dist-6E-epel/SRPMS',

      tag='dist-6E-epel',

      branch='el6',

      mailto='epel-announce@lists.fedoraproject.org',
@@ -441,9 +440,7 @@ 

                              srpm_name = self.by_bin[pkg].name

                          else:

                              srpm_name = pkg.name

-                         if (srpm_name not in to_check and

-                                 srpm_name not in new_names and

-                                 srpm_name not in seen):

+                         if (srpm_name not in to_check and srpm_name not in new_names and srpm_name not in seen):

                              new_names.append(srpm_name)

                          new_srpm_names.add(srpm_name)

  
@@ -629,8 +626,7 @@ 

              (pagure_dict[o].age.days // 7) >= week_limit]

  

          if orphans_not_breaking_deps_stale:

-             eprint(f"fedretire --orphan --branch {branch} -- " +

-                    " ".join(orphans_not_breaking_deps_stale))

+             eprint(f"fedretire --orphan --branch {branch} -- " + " ".join(orphans_not_breaking_deps_stale))

  

          info += wrap_and_format(

              f"Orphans{release_text} for at least {week_limit} "
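
On the dogpile cache region configured near the top of this file: callables are usually memoized through the region's decorator, along these lines (the helper name and URL path are hypothetical):

    @cache.cache_on_arguments()
    def fetch_projects_sketch():
        # results persist in the dbm file for expiration_time (one day)
        return requests.get(PAGURE_URL + '/api/0/projects').json()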

file modified
+1 -2
@@ -19,7 +19,7 @@ 

  oldtag = 'f24'

  # Create a koji session

  parser = argparse.ArgumentParser()

- parser.add_argument('-p','--koji-profile', help='Select a koji profile to use',required=True)

+ parser.add_argument('-p', '--koji-profile', help='Select a koji profile to use', required=True)

  args = parser.parse_args()

  koji_profile = args.koji_profile

  
@@ -65,4 +65,3 @@ 

  print('Tagged %s batches' % batch)

  

  result = kojisession.multiCall()

- 

file modified
+12 -11
@@ -49,12 +49,13 @@ 

      loglevel = logging.DEBUG

  elif args.quiet:

      loglevel = logging.ERROR

- else: 

+ else:

      loglevel = logging.INFO

  

  logging.basicConfig(format='%(levelname)s: %(message)s',

                      level=loglevel)

  

+ 

  def _unique_path(prefix):

      """Create a unique path fragment by appending a path component

      to prefix.  The path component will consist of a string of letter and numbers
@@ -64,7 +65,7 @@ 

      # For some reason repr(time.time()) includes 4 or 5

      # more digits of precision than str(time.time())

      return '%s/%r.%s' % (prefix, time.time(),

-                       ''.join([random.choice(string.ascii_letters) for i in range(8)]))

+                          ''.join([random.choice(string.ascii_letters) for i in range(8)]))

  

  

  # setup the koji session
@@ -88,16 +89,16 @@ 

  

      logging.debug("build=%s" % (buildinfo))

  

-     if buildinfo == None:

- 	logging.critical("build %s doesn't exist" % (build))

- 	break

- 

+     if buildinfo is None:

+         logging.critical("build %s doesn't exist" % (build))

+         break

  

      fname = "%s.src.rpm" % buildinfo['nvr']

-     url = "%s/packages/%s/%s/%s/src/%s" % (PACKAGEURL, buildinfo['package_name'], buildinfo['version'], buildinfo['release'], fname)

+     url = "%s/packages/%s/%s/%s/src/%s" % (

+         PACKAGEURL, buildinfo['package_name'], buildinfo['version'], buildinfo['release'], fname)

  

      if not os.path.isfile(fname):

- 	file = grabber.urlgrab(url, progress_obj = pg, text = "%s" % (fname))

+         file = grabber.urlgrab(url, progress_obj=pg, text="%s" % (fname))

  

      serverdir = _unique_path('cli-build')

      logging.info("uploading %s ..." % (build))
@@ -105,10 +106,10 @@ 

      source = "%s/%s" % (serverdir, fname)

  

      if args.scratch:

- 	opts = {}

- 	opts['scratch'] = True

+         opts = {}

+         opts['scratch'] = True

      else:

- 	opts = None

+         opts = None

  

      localkojisession.build(source, args.tag, opts=opts, priority=2)

  

file modified
+42 -40
@@ -15,7 +15,7 @@ 

  import koji

  import time

  import string

- import rpm 

+ import rpm

  import shutil

  

  inherit = False
@@ -30,11 +30,11 @@ 

      print("Usage: %s <arch> <tag>" % sys.argv[0])

      exit(0)

  

- 

  LOCALKOJIHUB = 'https://%s.koji.fedoraproject.org/kojihub' % (SECONDARY_ARCH)

  REMOTEKOJIHUB = 'https://koji.fedoraproject.org/kojihub'

  

- def _rpmvercmp ((e1, v1, r1), (e2, v2, r2)):

+ 

+ def _rpmvercmp((e1, v1, r1), (e2, v2, r2)):

      """find out which build is newer"""

      if e1 == "None":

          e1 = "0"
@@ -42,33 +42,34 @@ 

          e2 = "0"

      rc = rpm.labelCompare((e1, v1, r1), (e2, v2, r2))

      if rc == 1:

-         #first evr wins

+         # first evr wins

          return 1

      elif rc == 0:

-         #same evr

+         # same evr

          return 0

      else:

-         #second evr wins

+         # second evr wins

          return -1

  

- def _countMissing (build):

+ 

+ def _countMissing(build):

      """find how many builds are missing in local koji"""

      builds = remotekojisession.listTagged(tag, inherit=inherit, package=build['package_name'])

      cnt = 0

      local_evr = (str(build['epoch']), build['version'], build['release'])

  

- 

      for b in builds:

- 	remote_evr = (str(b['epoch']), b['version'], b['release'])

- 	newestRPM = _rpmvercmp(local_evr, remote_evr)

- 	if newestRPM == 0 or newestRPM == 1:

- 	    break

- 	cnt += 1

- 	if cnt > 5:

- 	    break

+         remote_evr = (str(b['epoch']), b['version'], b['release'])

+         newestRPM = _rpmvercmp(local_evr, remote_evr)

+         if newestRPM == 0 or newestRPM == 1:

+             break

+         cnt += 1

+         if cnt > 5:

+             break

  

      return cnt

  

+ 

  localkojisession = koji.ClientSession(LOCALKOJIHUB)

  remotekojisession = koji.ClientSession(REMOTEKOJIHUB)

  
@@ -84,51 +85,52 @@ 

  cnt['remote_only'] = 0

  cnt['total_missing_builds'] = 0

  

- local_pkgs = sorted(localkojisession.listTagged(tag, inherit=inherit, latest=True), key = lambda pkg: pkg['package_name'])

- remote_pkgs = sorted(remotekojisession.listTagged(tag, inherit=inherit, latest=True), key = lambda pkg: pkg['package_name'])

+ local_pkgs = sorted(localkojisession.listTagged(tag, inherit=inherit, latest=True), key=lambda pkg: pkg['package_name'])

+ remote_pkgs = sorted(remotekojisession.listTagged(tag, inherit=inherit, latest=True),

+                      key=lambda pkg: pkg['package_name'])

  

  local_num = len(local_pkgs)

  remote_num = len(remote_pkgs)

  

- 

  while (local < local_num) or (remote < remote_num):

  

      if remote_pkgs[remote]['package_name'] == local_pkgs[local]['package_name']:

          local_evr = (str(local_pkgs[local]['epoch']), local_pkgs[local]['version'], local_pkgs[local]['release'])

          remote_evr = (str(remote_pkgs[remote]['epoch']), remote_pkgs[remote]['version'], remote_pkgs[remote]['release'])

  

- 	newestRPM = _rpmvercmp(local_evr, remote_evr)

- 	if newestRPM == 0:

- 	    print("same: local and remote: %s " % local_pkgs[local]['nvr'])

- 	    cnt['same'] += 1

+         newestRPM = _rpmvercmp(local_evr, remote_evr)

+         if newestRPM == 0:

+             print("same: local and remote: %s " % local_pkgs[local]['nvr'])

+             cnt['same'] += 1

          if newestRPM == 1:

              print("newer locally: local: %s remote: %s" % (local_pkgs[local]['nvr'], remote_pkgs[remote]['nvr']))

- 	    cnt['newer'] += 1

+             cnt['newer'] += 1

          if newestRPM == -1:

- 	    missing = _countMissing(local_pkgs[local])

- 	    if missing > 5:

- 		txt = "more than 5"

- 	    else:

- 		txt = "%d" % missing

+             missing = _countMissing(local_pkgs[local])

+             if missing > 5:

+                 txt = "more than 5"

+             else:

+                 txt = "%d" % missing

  

-             print("newer remote: local: %s remote: %s with %s build(s) missing" % (local_pkgs[local]['nvr'], remote_pkgs[remote]['nvr'], txt))

- 	    cnt['total_missing_builds'] += missing

- 	    cnt['older'] += 1

+             print("newer remote: local: %s remote: %s with %s build(s) missing" % (

+                 local_pkgs[local]['nvr'], remote_pkgs[remote]['nvr'], txt))

+             cnt['total_missing_builds'] += missing

+             cnt['older'] += 1

  

- 	local += 1

- 	remote += 1

+         local += 1

+         remote += 1

  

      elif remote_pkgs[remote]['package_name'] > local_pkgs[local]['package_name']:

-     	print("only locally: %s" % local_pkgs[local]['nvr'])

- 	local += 1

- 	cnt['local_only'] += 1

+         print("only locally: %s" % local_pkgs[local]['nvr'])

+         local += 1

+         cnt['local_only'] += 1

  

      elif remote_pkgs[remote]['package_name'] < local_pkgs[local]['package_name']:

-     	print("only remote: %s" % remote_pkgs[remote]['nvr'])

- 	remote += 1

- 	cnt['remote_only'] += 1

+         print("only remote: %s" % remote_pkgs[remote]['nvr'])

+         remote += 1

+         cnt['remote_only'] += 1

  

  #    if cnt['older'] == 5:

- #	break

+ #    break

  

  print("statistics: %s" % cnt)
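
A quick illustration of the _rpmvercmp() normalization above (EVRs invented): a "None" epoch is rewritten to "0" before rpm.labelCompare runs, so these hold:

    assert _rpmvercmp(("None", "1.0", "1.fc20"), ("0", "1.0", "1.fc20")) == 0  # same EVR
    assert _rpmvercmp(("0", "2.0", "1.fc20"), ("0", "1.9", "9.fc20")) == 1     # first EVR wins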

file modified
+42 -34
@@ -13,7 +13,7 @@ 

  import time

  import random

  import string

- import rpm 

+ import rpm

  import shutil

  import argparse

  import tempfile
@@ -51,6 +51,7 @@ 

  

  pg = progress.TextMeter()

  

+ 

  def _unique_path(prefix):

      """Create a unique path fragment by appending a path component

      to prefix.  The path component will consist of a string of letter and numbers
@@ -60,7 +61,8 @@ 

      # For some reason repr(time.time()) includes 4 or 5

      # more digits of precision than str(time.time())

      return '%s/%r.%s' % (prefix, time.time(),

-                       ''.join([random.choice(string.ascii_letters) for i in range(8)]))

+                          ''.join([random.choice(string.ascii_letters) for i in range(8)]))

+ 

  

  def isNoarch(rpms):

      if not rpms:
@@ -68,22 +70,24 @@ 

      noarch = False

      for rpminfo in rpms:

          if rpminfo['arch'] == 'noarch':

-             #note that we've seen a noarch rpm

+             # note that we've seen a noarch rpm

              noarch = True

          elif rpminfo['arch'] != 'src':

              return False

      return noarch

  

+ 

  def tagSuccessful(nvr, tag):

      """tag completed builds into final tags"""

      localkojisession.tagBuildBypass(tag, nvr)

      logging.info("tagged %s to %s" % (nvr, tag))

  

+ 

  def _downloadURL(url, destf):

      """Download a url and save it to a file"""

-     file = grabber.urlopen(url, progress_obj = pg, text = "%s" % (destf))

+     file = grabber.urlopen(url, progress_obj=pg, text="%s" % (destf))

  

-     out = os.open(destf, os.O_WRONLY|os.O_CREAT|os.O_TRUNC, 0666)

+     out = os.open(destf, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0666)

      try:

          while 1:

              buf = file.read(4096)
@@ -94,24 +98,26 @@ 

          os.close(out)

          file.close()

  

+ 

  def _importURL(url, fn):

      """Import an rpm directly from a url"""

      serverdir = _unique_path('build-recent')

-     #TODO - would be possible, using uploadFile directly, to upload without writing locally.

-     #for now, though, just use uploadWrapper

+     # TODO - would be possible, using uploadFile directly, to upload without writing locally.

+     # for now, though, just use uploadWrapper

      koji.ensuredir(workpath)

      dst = "%s/%s" % (workpath, fn)

      logging.info("Downloading %s to %s..." % (url, dst))

      _downloadURL(url, dst)

-     #fsrc = urllib2.urlopen(url)

-     #fdst = file(dst, 'w')

-     #shutil.copyfileobj(fsrc, fdst)

-     #fsrc.close()

-     #fdst.close()

+     # fsrc = urllib2.urlopen(url)

+     # fdst = file(dst, 'w')

+     # shutil.copyfileobj(fsrc, fdst)

+     # fsrc.close()

+     # fdst.close()

      logging.info("Uploading %s..." % dst)

      localkojisession.uploadWrapper(dst, serverdir, blocksize=65536)

      localkojisession.importRPM(serverdir, fn)

  

+ 

  def importBuild(rpms, buildinfo, tag=None):

      '''import a build from remote hub'''

      for rpminfo in rpms:
@@ -125,26 +131,27 @@ 

      try:

          _importURL(url, fname)

      except:

- 	logging.error("Importing %s failed" % fname)

- 	return False

+         logging.error("Importing %s failed" % fname)

+         return False

      else:

          for rpminfo in rpms:

- 	    if rpminfo['arch'] == 'src':

-     		#already imported above

-         	continue

-     	    relpath = pathinfo.rpm(rpminfo)

-     	    url = "%s/%s" % (build_url, relpath)

-     	    logging.debug("url: %s" % url)

-     	    fname = os.path.basename(relpath)

-     	    logging.debug("fname: %s" % fname)

- 	    try:

-     		_importURL(url, fname)

- 	    except:

- 		logging.error("Importing %s failed" % fname)

- 		return False

- 

- 	tagSuccessful(buildinfo['nvr'], tag)

- 	return True

+             if rpminfo['arch'] == 'src':

+                 # already imported above

+                 continue

+             relpath = pathinfo.rpm(rpminfo)

+             url = "%s/%s" % (build_url, relpath)

+             logging.debug("url: %s" % url)

+             fname = os.path.basename(relpath)

+             logging.debug("fname: %s" % fname)

+             try:

+                 _importURL(url, fname)

+             except:

+                 logging.error("Importing %s failed" % fname)

+                 return False

+ 

+         tagSuccessful(buildinfo['nvr'], tag)

+         return True

+ 

  

  # setup the koji session

  logging.info('Setting up koji session')
@@ -167,12 +174,13 @@ 

  

      rpms = remotekojisession.listRPMs(buildinfo['id'])

      if isNoarch(rpms):

- 	buildinfo = remotekojisession.getBuild(buildinfo['id'])

+         buildinfo = remotekojisession.getBuild(buildinfo['id'])

          if args.force:

              localkojisession.resetBuild(buildinfo['nvr'])

-             localkojisession.createEmptyBuild(buildinfo['package_name'], buildinfo['version'], buildinfo['release'], buildinfo['epoch'])

- 	importBuild(rpms, buildinfo, tag=args.tag)

+             localkojisession.createEmptyBuild(buildinfo['package_name'], buildinfo['version'], buildinfo['release'],

+                                               buildinfo['epoch'])

+         importBuild(rpms, buildinfo, tag=args.tag)

      else:

- 	logging.error("not a pure noarch build")

+         logging.error("not a pure noarch build")

  

  shutil.rmtree(workpath)
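
The isNoarch() gate above in miniature: a build qualifies only when every binary package is noarch (the src entry is ignored). Inputs here are invented:

    assert isNoarch([{'arch': 'src'}, {'arch': 'noarch'}])         # pure noarch
    assert not isNoarch([{'arch': 'noarch'}, {'arch': 'x86_64'}])  # mixed build
    assert not isNoarch([])                                        # nothing at all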

file modified
+23 -23
@@ -1,13 +1,13 @@ 

  #!/usr/bin/python

  #

- # koji-reimport.py: Reset builds and re-import corrupt noarch packages 

- # on secondary kojis. 

+ # koji-reimport.py: Reset builds and re-import corrupt noarch packages

+ # on secondary kojis.

  #

- # Copyright (c) 2014 Red Hat, Inc. 

+ # Copyright (c) 2014 Red Hat, Inc.

  #

  # SPDX-License-Identifier: GPL-2.0

  #

- # Authors: 

+ # Authors:

  #   David Aquilina <dwa@redhat.com>

  

  from __future__ import print_function
@@ -18,15 +18,15 @@ 

  import shutil

  import argparse

  

- # fill these in: 

- # pkgs to re-import: 

+ # fill these in:

+ # pkgs to re-import:

  pkgs = ['']

- # tag to tag them with: 

+ # tag to tag them with:

  tag = ''

  

  # setup koji sessions:

  parser = argparse.ArgumentParser()

- parser.add_argument('-p','--koji-profile', help='Koji profile for alternate arches',required=True)

+ parser.add_argument('-p', '--koji-profile', help='Koji profile for alternate arches', required=True)

  args = parser.parse_args()

  sec_profile = args.koji_profile

  
@@ -36,26 +36,26 @@ 

  secondary = secondarykoji.ClientSession(secondarykoji.config.server)

  secondary.krb_login()

  

- # do the thing: 

+ # do the thing:

  

- for pkg in pkgs: 

-     print('Parsing package '+pkg)

-     # get build info: 

+ for pkg in pkgs:

+     print('Parsing package ' + pkg)

+     # get build info:

      buildinfo = primary.getBuild(pkg)

-     # reset the build on secondary: 

+     # reset the build on secondary:

      secondary.untagBuild(tag, pkg)

      secondary.resetBuild(pkg)

-     # create an empty build: 

-     secondary.createEmptyBuild(buildinfo['package_name'], buildinfo['version'], buildinfo['release'], buildinfo['epoch'])

-     # quick and dirty from here... 

-     # create temporary dir, throw rpms into it: 

-     tempdir = tempfile.mkdtemp() 

-     subprocess.call(['koji', 'download-build', pkg], cwd=tempdir) 

+     # create an empty build:

+     secondary.createEmptyBuild(buildinfo['package_name'], buildinfo['version'], buildinfo['release'],

+                                buildinfo['epoch'])

+     # quick and dirty from here...

+     # create temporary dir, throw rpms into it:

+     tempdir = tempfile.mkdtemp()

+     subprocess.call(['koji', 'download-build', pkg], cwd=tempdir)

      # verify RPMs are good, if so, import them:

      subprocess.check_call(['rpm -K *.rpm'], cwd=tempdir, shell=True)

-     subprocess.call(['%s import *.rpm'%(sec_profile)], cwd=tempdir, shell=True)

-     # Tag: 

-     secondary.tagBuild(tag, pkg) 

+     subprocess.call(['%s import *.rpm' % (sec_profile)], cwd=tempdir, shell=True)

+     # Tag:

+     secondary.tagBuild(tag, pkg)

      # Remove the temp dir

      shutil.rmtree(tempdir)

- 
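
One observation on the two subprocess calls above: with shell=True the command is conventionally passed as a single string rather than a one-element list (with a list, only the first element is handed to the shell anyway). An equivalent sketch:

    subprocess.check_call('rpm -K *.rpm', cwd=tempdir, shell=True)
    subprocess.call('%s import *.rpm' % sec_profile, cwd=tempdir, shell=True)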

file modified
+48 -49
@@ -9,7 +9,7 @@ 

  

  

  import fedmsg

- import threading 

+ import threading

  from collections import deque

  import time

  import koji
@@ -20,23 +20,23 @@ 

  import ConfigParser

  import argparse

  

- parser = argparse.ArgumentParser() 

+ parser = argparse.ArgumentParser()

  parser.add_argument("-c", "--config-file", dest="shadowconfig",

-                   default="/etc/koji-shadow/koji-shadow.conf",

-                   help="koji-shadow configuration file")

- parser.add_argument("--shadow", dest="shadowcommand", 

-                   help="path to koji-shadow", default="/usr/sbin/koji-shadow")

- parser.add_argument("-l", "--logdir", dest="logdir", 

-                   help="directory to write logs to", 

-                   default="/mnt/koji/reports/koji-stalk")

+                     default="/etc/koji-shadow/koji-shadow.conf",

+                     help="koji-shadow configuration file")

+ parser.add_argument("--shadow", dest="shadowcommand",

+                     help="path to koji-shadow", default="/usr/sbin/koji-shadow")

+ parser.add_argument("-l", "--logdir", dest="logdir",

+                     help="directory to write logs to",

+                     default="/mnt/koji/reports/koji-stalk")

  parser.add_argument("-t", "--test", dest="testonly", action="store_true",

-                   help="Only monitor fedmsg without building", default=False)

- parser.add_argument("--threads", type=int, default="3", 

-                   help="number of threads per distro")

+                     help="Only monitor fedmsg without building", default=False)

+ parser.add_argument("--threads", type=int, default="3",

+                     help="number of threads per distro")

  

  args, extras = parser.parse_known_args()

  

- ### Begin Configuration ###

+ # Begin Configuration

  

  # distributions to build for:

  distronames = ['f20', 'f21', 'f22', 'f23']
@@ -47,11 +47,11 @@ 

  # koji setup

  remote = koji.ClientSession('http://koji.fedoraproject.org/kojihub')

  

- # Configuration options below have been converted to use options. 

+ # Configuration options below have been converted to use options.

  # If you want to hard-code values for yourself, do it here:

  

  # number of threads (i.e. max simultaneous koji-shadow instances) per distro

- threads = int(args.threads) 

+ threads = int(args.threads)

  

  # Don't actually build anything or attempt to tag, write logs to /tmp

  testonly = args.testonly
@@ -61,20 +61,20 @@ 

  shadowconfig = args.shadowconfig

  logdir = args.logdir

  

- 

- ### End configuration ### 

+ # End configuration

  

  # Do some stuff for testing mode:

  if testonly:

      shadowcommand = '/bin/echo'

      logdir = '/tmp'

  

- # logging setup 

+ # logging setup

  # Setting up two handlers lets us simultaneously log to a file & to stdout

  # TODO: Rotate and/or only keep X amount of logs

  logger = logging.getLogger('KojiStalk')

  logger.setLevel(logging.DEBUG)

- fh = logging.handlers.TimedRotatingFileHandler(os.path.join(logdir, 'KojiStalk.log'), when='d', interval=7, backupCount=8)

+ fh = logging.handlers.TimedRotatingFileHandler(os.path.join(logdir, 'KojiStalk.log'), when='d', interval=7,

+                                                backupCount=8)

  fh.setLevel(logging.DEBUG)

  ch = logging.StreamHandler()

  ch.setLevel(logging.DEBUG)
@@ -84,7 +84,6 @@ 

  logger.addHandler(ch)

  logger.addHandler(fh)

  

- 

  # Have to warn about testing /after/ we set up the logger...

  if testonly:

      logger.warn("Running in test mode!")
@@ -100,10 +99,12 @@ 

  

  distqueues = {}

  for distro in distronames:

-     distqueues[distro] = deque() 

+     distqueues[distro] = deque()

+ 

  

  class KojiStalk(threading.Thread):

-     """ Use fedmsg to monitor what koji.fp.o is building""" 

+     """ Use fedmsg to monitor what koji.fp.o is building"""

+ 

      def __init__(self, buildqueue):

          threading.Thread.__init__(self)

          self.buildqueue = buildqueue
@@ -116,16 +117,13 @@ 

  

          # parse the koji-shadow configuration to ignore packages we know

          # we don't care about.

-         ignorelist = (ks_config.get('rules', 'excludelist').split() + 

-             ks_config.get('rules', 'ignorelist').split())

+         ignorelist = (ks_config.get('rules', 'excludelist').split() + ks_config.get('rules', 'ignorelist').split())

  

          logger.debug('Monitoring fedmsg.')

          for name, endpoint, topic, msg in fedmsg.tail_messages(**config):

-             if (msg['topic'] == 

-                 'org.fedoraproject.prod.buildsys.build.state.change' and 

-                 msg['msg']['new'] == 1 and 

-                 msg['msg']['name'] not in ignorelist):

-                 buildqueue.append(msg['msg']['name']+'-'+msg['msg']['version']+'-'+msg['msg']['release'])

+             if (msg['topic'] == 'org.fedoraproject.prod.buildsys.build.state.change'

+                     and msg['msg']['new'] == 1

+                     and msg['msg']['name'] not in ignorelist):

+                 buildqueue.append(msg['msg']['name'] + '-' + msg['msg']['version'] + '-' + msg['msg']['release'])

+ 

  

  class BuildFromDistroQueues(threading.Thread):

      def __init__(self, distro):
@@ -136,14 +134,15 @@ 

      def run(self):

          while True:

              # excessive debugging:

-             #logger.debug('Checking queue for %s', self.distro)

-             if distqueues[self.distro]:  

+             # logger.debug('Checking queue for %s', self.distro)

+             if distqueues[self.distro]:

                  build_nvr(distqueues[self.distro].popleft(), self.distro)

              else:

                  time.sleep(60)

  

+ 

  def sort_nvr(nvr):

-     """ Query koji.fp.o for the target used for a build, then dump the NVR 

+     """ Query koji.fp.o for the target used for a build, then dump the NVR

          into the appropriate distro-specific queue """

      logger.debug('Analyzing %s', nvr)

      data = remote.getBuild(nvr)
@@ -151,64 +150,63 @@ 

      for distro in distronames:

          if re.search(distro, buildtarget):

              distqueues[distro].append(nvr)

-             #logger.debug('Placing %s in %s', nvr, distro)

+             # logger.debug('Placing %s in %s', nvr, distro)

              return

      if re.search('rawhide', buildtarget):

          distqueues[rawhide].append(nvr)

      else:

          logger.info('Ignored %s from %s', nvr, buildtarget)

  

+ 

  def build_nvr(nvr, distro):

      """ Use koji-shadow to build a given NVR """

      # Pull newer deps from the -build tag to catch any updates & overrides

-     buildtag = distro+'-build'

+     buildtag = distro + '-build'

      if distro == rawhide:

-         desttag = rawhide 

+         desttag = rawhide

      else:

-         desttag = distro+'-updates-candidate'

+         desttag = distro + '-updates-candidate'

      # Log koji-shadow output

      shadowlog = open(os.path.join(logdir, nvr), 'w')

-     # The command line parameters assume that prefer-new and import-noarch are 

-     # specified in your koji-shadow config file. You should also be using a 

+     # The command line parameters assume that prefer-new and import-noarch are

+     # specified in your koji-shadow config file. You should also be using a

      # version of koji-shadow which supports prefer-new and --build in combination

-     build = subprocess.call([shadowcommand, '-c', shadowconfig, 

-                             '--build', nvr, buildtag], stdout=shadowlog, 

+     build = subprocess.call([shadowcommand, '-c', shadowconfig,

+                              '--build', nvr, buildtag], stdout=shadowlog,

                              stderr=shadowlog)

      # koji-shadow doesn't return exit codes, so the only way we know a

-     # build failed is if tagging the NVR fails. 

+     # build failed is if tagging the NVR fails.

      if not testonly:

          try:

              local.tagBuild(desttag, nvr)

              logger.info('Built & tagged: %s', nvr)

              # If we got this far, we don't care about the log file.

              os.unlink(os.path.join(logdir, nvr))

-         except: 

+         except:

              logger.warn('Failed build: %s', nvr)

      else:

          logger.info('Test Mode: Parsed %s', nvr)

-     

-     

  

- def main():

  

+ def main():

      # Start the thread that listens to fedmsg

-     ks = KojiStalk(buildqueue) 

+     ks = KojiStalk(buildqueue)

      ks.daemon = True

      logger.debug('KojiStalk thread starting')

      ks.start()

  

-     # Start the threads that listen to the distro build queues. 

+     # Start the threads that listen to the distro build queues.

      for distro in distronames:

          for i in range(threads):

              buildthread = BuildFromDistroQueues(distro)

              buildthread.daemon = True

              buildthread.start()

-  

+ 

      logger.debug('Monitoring NVRs queue')

      while True:

          # Sort NVRs we get from fedmsg into dist-specific queues, or wait

          # for more NVRs to show up if the queue is empty.

-         if buildqueue: 

+         if buildqueue:

              nvr = buildqueue.popleft()

              sort_nvr(nvr)

          else:
@@ -219,4 +217,5 @@ 

                          logger.debug('%s queue: %s', distro, distqueues[distro])

              time.sleep(60)

  

+ 

  main()
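
For context, the shape of the fedmsg payload that KojiStalk.run() filters on (field values invented; the topic string is the one used in the code, and state 1 is koji's COMPLETE):

    msg = {'topic': 'org.fedoraproject.prod.buildsys.build.state.change',
           'msg': {'name': 'bash', 'version': '4.3.42', 'release': '1.fc23', 'new': 1}}
    # run() would queue this as the NVR 'bash-4.3.42-1.fc23'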

@@ -14,16 +14,20 @@ 

  

  LOGGER = logging.getLogger(__name__)

  

+ 

  def koji2datetime(d):

      return datetime.strptime(d, "%Y-%m-%d %H:%M:%S.%f")

  

+ 

  def bug2str(bug):

      return f"{bug.id} ({bug.summary})"

  

+ 

  def release2disttag(release):

      assert release.startswith('f')

      return 'fc' + release[1:]

  

+ 

  def parse_bool(s):

      if s.lower() in {'0', 'false', 'no'}:

          return False
@@ -31,6 +35,7 @@ 

          return True

      raise ValueError

  

+ 

  def bug_closer(to_close, bz, dry_run):

      while True:

          item = to_close.get()
@@ -46,12 +51,13 @@ 

  {build["nvr"]}: https://koji.fedoraproject.org/koji/buildinfo?buildID={build["id"]}

  """)

          LOGGER.info(f"{bug2str(bug)}\n"

-                      "  → Closing")

+                     "  → Closing")

          if not dry_run:

              bz.update_bugs([bug.id], update)

  

          to_close.task_done()

  

+ 

  def main():

      rebuilds_info = {k: v for k, v in MASSREBUILDS.items() if "buildtag" in v}

  
@@ -112,13 +118,12 @@ 

          if not bug.is_open:

              # DO NOT TOUCH CLOSED BUGZ!

              LOGGER.debug(f"{bug2str(bug)}\n"

-                           "  → Skipping closed bug")

+                          "  → Skipping closed bug")

              continue

-         if (args.strict_title and

-             not bug.summary.startswith(f"{bug.component}: FTBFS in F")):

+         if (args.strict_title and not bug.summary.startswith(f"{bug.component}: FTBFS in F")):

              # They might need special care

              LOGGER.debug(f"{bug2str(bug)}\n"

-                           "  → Skipping bug with non-standard name")

+                          "  → Skipping bug with non-standard name")

              continue

  

          bugs.append(bug)
@@ -137,7 +142,7 @@ 

          builds = [build for build in builds if koji2datetime(build["creation_time"]) >= rebuild_time]

          if not builds:

              LOGGER.debug(f"{bug2str(bug)}\n"

-                           "  → No successful builds")

+                          "  → No successful builds")

              continue

          for build in builds:

              if args.strict_disttag:
@@ -157,5 +162,6 @@ 

      for t in threads:

          t.join()

  

+ 

  if __name__ == "__main__":

      main()
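
Two of the small helpers above, illustrated with sample values:

    from datetime import datetime
    assert koji2datetime("2019-01-31 10:10:00.000000") == datetime(2019, 1, 31, 10, 10)
    assert release2disttag("f30") == "fc30"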

file modified
+49 -41
@@ -15,6 +15,7 @@ 

  import argparse

  

  import gi

+ 

  gi.require_version('Modulemd', '2.0')

  from gi.repository import Modulemd

  
@@ -47,6 +48,7 @@ 

          return 1

      return 0

  

+ 

  # This function needs a dry-run like option

  def runmeoutput(cmd, action, pkg, env, cwd=workdir):

      """Simple function to run a command and return output if successful.
@@ -64,11 +66,13 @@ 

      result = pid.communicate()[0].rstrip('\n')

      return result

  

+ 

  parser = argparse.ArgumentParser()

  parser.add_argument('token_file', help='MBS token file for authentication.')

- #During mass rebuild, we need to check if a module has build time dependencies on module_mass_rebuild_platform

- #During mass branching, we need to check if a module has run time dependencies on module_mass_branching_platform

- parser.add_argument('process', help='build or branch, build is used during mass rebuild time, branch is used during branching time')

+ # During mass rebuild, we need to check if a module has build time dependencies on module_mass_rebuild_platform

+ # During mass branching, we need to check if a module has run time dependencies on module_mass_branching_platform

+ parser.add_argument('process',

+                     help='build or branch, build is used during mass rebuild time, branch is used during branching time')

  args = parser.parse_args()

  

  if __name__ == '__main__':
@@ -78,7 +82,7 @@ 

          token = f.read().strip()

      pdc = 'https://pdc.fedoraproject.org/'

      modules = []

-     #Query pdc to get the modules that are not eol'd

+     # Query pdc to get the modules that are not eol'd

      url = '{0}/rest_api/v1/component-branch-slas/?page_size=100&branch_type=module&branch_active=1'.format(pdc)

      while True:

          rv = requests.get(url)
@@ -87,15 +91,15 @@ 

          rv_json = rv.json()

          for sla in rv_json['results']:

              module = {}

-             #module['module_name'] = sla['branch']['global_component']

-             #module['module_stream'] = sla['branch']['name']

+             # module['module_name'] = sla['branch']['global_component']

+             # module['module_stream'] = sla['branch']['name']

              module[sla['branch']['global_component']] = sla['branch']['name']

-             if not module in modules:

+             if module not in modules:

                  modules.append(module)

          url = rv_json['next']

          if not url:

              break

-     #print(modules)

+     # print(modules)

  

      # Environment for using releng credentials for pushing and building

      enviro['GIT_SSH'] = '/usr/local/bin/relengpush'
@@ -105,37 +109,39 @@ 

      }

  

      for module in modules:

-         if(len(list(module.keys()))) > 1:

+         if (len(list(module.keys()))) > 1:

              print('Something is wrong, {} has more than 1 stream in the dict'.format(list(module.keys())[0]))

              continue

          else:

              name = list(module.keys())[0]

              stream = module[name]

-             #Get the list of builds that are submitted after the module epoch datetime

-             #Use this info to figure out whether you need to resubmit the build or not

-             #This is useful when the script execution fails for unknown reasons and

-             #dont have to submit all the builds again.

+             # Get the list of builds that are submitted after the module epoch datetime

+             # Use this info to figure out whether you need to resubmit the build or not

+             # This is useful when the script execution fails for unknown reasons and

+             # dont have to submit all the builds again.

              if process == 'build':

-                 mbs="https://mbs.fedoraproject.org/module-build-service/1/module-builds/?submitted_after={}&name={}&stream={}&state=ready&state=init&state=wait&state=build&state=done".format(module_mass_rebuild_epoch,name,stream)

+                 mbs = "https://mbs.fedoraproject.org/module-build-service/1/module-builds/?submitted_after={}&name={}&stream={}&state=ready&state=init&state=wait&state=build&state=done".format(

+                     module_mass_rebuild_epoch, name, stream)

              elif process == 'branch':

-                 mbs="https://mbs.fedoraproject.org/module-build-service/1/module-builds/?submitted_after={}&name={}&stream={}&state=ready&state=init&state=wait&state=build&state=done".format(module_mass_branching_epoch,name,stream)

+                 mbs = "https://mbs.fedoraproject.org/module-build-service/1/module-builds/?submitted_after={}&name={}&stream={}&state=ready&state=init&state=wait&state=build&state=done".format(

+                     module_mass_branching_epoch, name, stream)

              else:

                  print("Please select either build or branch for the process type")

                  sys.exit(1)

  

              rv = requests.get(mbs)

              if not rv.ok:

-                 print("Unable to get info about {} module and {} stream, skipping the build".format(name,stream))

+                 print("Unable to get info about {} module and {} stream, skipping the build".format(name, stream))

                  continue

              rv_json = rv.json()

-             #Check if a module build is already submitted after the epoch date

+             # Check if a module build is already submitted after the epoch date

              if rv_json['meta']['total'] != 0:

-                 print("Skipping {} module build for {} stream, since its already built".format(name,stream))

+                 print("Skipping {} module build for {} stream, since its already built".format(name, stream))

              else:

                  # Check if the clone already exists

                  if not os.path.exists(os.path.join(workdir, name)):

                      # Clone module git

-                     fedpkgcmd = ['fedpkg', '--user', 'releng', 'clone', 'modules/'+name]

+                     fedpkgcmd = ['fedpkg', '--user', 'releng', 'clone', 'modules/' + name]

                      print('Cloning module %s' % name)

                      if runme(fedpkgcmd, 'fedpkg', name, enviro):

                          continue
@@ -144,7 +150,7 @@ 

                  fedpkgcheckoutcmd = ['fedpkg', 'switch-branch', stream]

                  print('Checking out the %s stream branch' % stream)

                  if runme(fedpkgcheckoutcmd, 'fedpkg', stream, enviro,

-                                  cwd=os.path.join(workdir, name)):

+                          cwd=os.path.join(workdir, name)):

                      continue

  

                  # Check for a noautobuild file
@@ -154,13 +160,13 @@ 

                      continue

  

                  # Find the modulemd file

-                 if os.path.exists(os.path.join(workdir, name, name+'.yaml')):

-                     modulemd = os.path.join(workdir, name, name+'.yaml')

+                 if os.path.exists(os.path.join(workdir, name, name + '.yaml')):

+                     modulemd = os.path.join(workdir, name, name + '.yaml')

                  else:

                      sys.stderr.write('%s failed modulemd check\n' % name)

                      continue

  

-                 #Use libmodulemd to determine if this module stream applies to this platform version

+                 # Use libmodulemd to determine if this module stream applies to this platform version

                  try:

                      mmd = Modulemd.ModuleStream.read_file(modulemd, True)

                  except:
@@ -168,19 +174,18 @@ 

                      continue

                  if process == 'build':

                      platform = massrebuild['module_mass_rebuild_platform']

-                     #check if a module has build time dependency on platform

+                     # check if a module has build time dependency on platform

                      needs_building = mmd.build_depends_on_stream('platform', platform)

                  elif process == 'branch':

                      platform = massrebuild['module_mass_branching_platform']

-                     #check if a module has run time dependency on platform

+                     # check if a module has run time dependency on platform

                      needs_building = mmd.depends_on_stream('platform', platform)

                  else:

                      print("Please select either build or branch for the process type")

                      sys.exit(1)

  

- 

                  if not needs_building:

-                     print("Not required to build module {} for stream {}".format(name,stream))

+                     print("Not required to build module {} for stream {}".format(name, stream))

                      continue

                  else:

                      # Set the git user.name and user.email
@@ -188,42 +193,44 @@ 

                      set_mail = ['git', 'config', 'user.email', 'releng@fedoraproject.org']

                      print('Setting git user.name and user.email')

                      if runme(set_name, 'set_name', name, enviro,

-                                  cwd=os.path.join(workdir, name)):

+                              cwd=os.path.join(workdir, name)):

                          continue

                      if runme(set_mail, 'set_mail', name, enviro,

-                                  cwd=os.path.join(workdir, name)):

+                              cwd=os.path.join(workdir, name)):

                          continue

  

                      # Empty git commit

                      if process == 'build':

                          commit = ['git', 'commit', '-s', '--allow-empty', '-m', comment]

                      elif process == 'branch':

-                         commit = ['git', 'commit', '-s', '--allow-empty', '-m', 'Branching {} from rawhide, second attempt after platform:f31 enablement'.format(rebuildid)]

+                         commit = ['git', 'commit', '-s', '--allow-empty', '-m',

+                                   'Branching {} from rawhide, second attempt after platform:f31 enablement'.format(

+                                       rebuildid)]

                      else:

                          print("Please select either build or branch for the process type")

                          sys.exit(1)

                      print('Committing changes for %s' % name)

                      if runme(commit, 'commit', name, enviro,

-                                  cwd=os.path.join(workdir, name)):

+                              cwd=os.path.join(workdir, name)):

                          continue

  

-                     #Push the empty commit

+                     # Push the empty commit

                      push = ['fedpkg', 'push']

                      print('Pushing changes for %s' % name)

                      if runme(push, 'push', name, enviro,

-                                  cwd=os.path.join(workdir, name)):

+                              cwd=os.path.join(workdir, name)):

                          continue

  

                      # get git url

                      urlcmd = ['fedpkg', 'giturl']

                      print('Getting git url for %s' % name)

                      url = runmeoutput(urlcmd, 'giturl', name, enviro,

-                                  cwd=os.path.join(workdir, name))

+                                       cwd=os.path.join(workdir, name))

                      if not url:

                          continue

-                     #mbs requires git url to have ?# before git hash

-                     #whereas fedpkg giturl returns just # before the hash

-                     #This will replace # with ?# for this reason

+                     # mbs requires git url to have ?# before git hash

+                     # whereas fedpkg giturl returns just # before the hash

+                     # This will replace # with ?# for this reason

                      url = url.replace('#', '?#')
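
For illustration, the rewrite above turns a fedpkg-style URL into the form MBS expects (the repository name here is hypothetical):

    url = 'https://src.fedoraproject.org/modules/testmodule.git#deadbeef'
    url = url.replace('#', '?#')
    # -> 'https://src.fedoraproject.org/modules/testmodule.git?#deadbeef'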

  

                      # Module build
@@ -246,14 +253,15 @@ 

                          print("Please select either build or branch for the process type")

                          sys.exit(1)

  

-                     rv = requests.post('https://mbs.fedoraproject.org/module-build-service/2/module-builds/', data=data, headers=headers)

+                     rv = requests.post('https://mbs.fedoraproject.org/module-build-service/2/module-builds/', data=data,

+                                        headers=headers)

                      if rv.ok:

-                         print('Building {} module for stream {}'.format(name,stream))

-                         #pprint(rv.json())

+                         print('Building {} module for stream {}'.format(name, stream))

+                         # pprint(rv.json())

                      elif rv.status_code == 401:

                          print('The token is unauthorized', file=sys.stderr)

                          print(rv.text)

                          sys.exit(1)

                      else:

                          print(rv.text)

-                         print('Unable to submit the module build {} for branch {}'.format(name,stream))

+                         print('Unable to submit the module build {} for branch {}'.format(name, stream))
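
For context, a minimal sketch of the submission pattern used above; the payload keys and the token header shape are assumptions, since `data` and `headers` are built outside this hunk:

    import json
    import requests

    MBS_URL = 'https://mbs.fedoraproject.org/module-build-service/2/module-builds/'
    headers = {'Authorization': 'Bearer %s' % token}      # assumed token header shape
    data = json.dumps({'scmurl': url, 'branch': stream})  # assumed payload keys
    rv = requests.post(MBS_URL, data=data, headers=headers)
    if not rv.ok:
        print(rv.text)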

file modified
+72 -19
@@ -19,22 +19,23 @@ 

  # Set some variables

  # Some of these could arguably be passed in as args.

  

- #NOTE: The ordering of inputs matters. Please provide master/rawhide inputs first.

+ # NOTE: The ordering of inputs matters. Please provide master/rawhide inputs first.

  

- buildtag = 'f27' # tag to build from

- secondbuildtag = 'f26' # tag to build from

- epoch = '2017-05-12 00:00:00.000000' # rebuild anything not built after this date

+ buildtag = 'f27'  # tag to build from

+ secondbuildtag = 'f26'  # tag to build from

+ epoch = '2017-05-12 00:00:00.000000'  # rebuild anything not built after this date

  user = 'Fedora Release Engineering <rel-eng@lists.fedoraproject.org>'

  comment = '- Rebuilt for https://fedoraproject.org/wiki/Fedora_26_27_Mass_Rebuild'

  workdir = os.path.expanduser('~/massbuild-gcc')

  enviro = os.environ

- targets = ['f27-gcc-abi-rebuild','f26-gcc-abi-rebuild'] #Set master/rawhide rebuild target first item in the list

- branches = ['master', 'f26'] #Set master(rawhide) branch first item in the list

- enviro['CVS_RSH'] = 'ssh' # use ssh for cvs

+ targets = ['f27-gcc-abi-rebuild', 'f26-gcc-abi-rebuild']  # Set the master/rawhide rebuild target as the first item in the list

+ branches = ['master', 'f26']  # Set the master (rawhide) branch as the first item in the list

+ enviro['CVS_RSH'] = 'ssh'  # use ssh for cvs

  

  pkg_skip_list = ['fedora-release', 'fedora-repos', 'fedora-modular-release', 'fedora-modular-repos', 'generic-release',

-         'redhat-rpm-config', 'shim', 'shim-signed', 'shim-unsigned-aarch64', 'shim-unsigned-x64', 'kernel',

-         'linux-firmware', 'grub2', 'openh264', 'glibc32']

+                  'redhat-rpm-config', 'shim', 'shim-signed', 'shim-unsigned-aarch64', 'shim-unsigned-x64', 'kernel',

+                  'linux-firmware', 'grub2', 'openh264', 'glibc32']

+ 

  

  # Define functions

  
@@ -53,9 +54,10 @@ 

          return 1

      return 0

  

+ 

  # This function needs a dry-run like option

  def runmeoutput(cmd, action, pkg, env, cwd=workdir):

-     """Simple function to run a command and return output if successful. 

+     """Simple function to run a command and return output if successful.

         cmd is a list of the command and arguments, action is a

         name for the action (for logging), pkg is the name of the package

         being operated on, env is the environment dict, and cwd is where
@@ -75,7 +77,58 @@ 

  kojisession = koji.ClientSession('https://koji.fedoraproject.org/kojihub')

  

  # Generate a list of packages to iterate over

- pkgs = ['libtorrent','ceres-solver','codeblocks','opencv','mmseq','zhu3d','libmediainfo','fawkes','fmt','step','mlpack','hyperrogue','root','vxl','repsnapper','tellico','pcl','avogadro2-libs','znc','purple-line','nload','jsoncpp','libepubgen','device-mapper-persistent-data','libgnomecanvasmm26','kig','coan','plotmm','fuse-emulator-utils','eris','harmonyseq','afflib','libopenraw','coin-or-Alps','ccgo','cfdg','linbox','recoll','votca-csg','flxmlrpc','coin-or-CoinUtils','stage','glogg','mongo-cxx-driver','libmediainfo','gsmartcontrol','ochusha','verilator','rb_libtorrent','rmol','exempi','pdns-recursor','gparted','ardour2','librime','mm3d','qt5-qtconnectivity','airtsp','normaliz','vulkan','kf5-kio','gpsim','monotone','mmapper','ember','qpdf','wireshark','mapserver','filezilla','extremetuxracer','mypaint','alliance','kstars','qgis','ardour5','opencv','qcad','passenger','calf','qt5-qtbase','kopete','digikam','nodejs-mapnik','stellarium','opencity','icecat','qm-vamp-plugins','pingus','abiword','goldendict','gtengine','condor','cloudy','healpix','latte-integrale','celestia','din','perl-Boost-Geometry-Utils','libpar2','libvisio','coin-or-Bcp','love','3Depict','flrig','libpagemaker','pan','engauge-digitizer','libzmf','scorchwentbonkers','libfilezilla','v4l-utils','gdl','uhd','libfreehand','llvm3.9','sword','nextcloud-client','protobuf','crawl','libmspub','pynac','istatd','kblocks','libetonyek','kf5-knotifications','fawkes','minion','tcpflow','ignition-math','libwps','fcl','ergo','libpwiz','endless-sky','webkitgtk4','apitrace','libghemical','coin-or-OS','binutils','coin-or-Dip','adanaxisgpl','extundelete','synfig','stxxl','libminc','cairomm','amftools','mingw-qt5-qtbase','gnuplot','tellico','rdfind','freefem++','rocs','kcachegrind','rosegarden4','libmp4v2','qesteidutil','qcustomplot','code-editor','rcsslogplayer','pulseview','libdxfrw','mlpack','worker','aseman-qt-tools','stockfish','massif-visualizer','cryptominisat','powertop','usbguard','thrift','mmseq','fparser','audacity','poedit','panoglview','sonic-visualiser','mathex','getdp','librealsense','onboard','libgltf','aria2','innoextract','fritzing','flamerobin','gambas3','openmsx','oxygen-gtk2','asymptote','repsnapper','openscad','givaro','ompl','bionetgen','xrootd','qt5-qt3d','coot','glob2','nyquist','avogadro2-libs','cmake','libcec','qwtplot3d','kdevplatform','xapian-core','zpaq','libstaroffice','link-grammar','synergy','dasher','vcftools','ldc','lnav','coin-or-Couenne','dwlocstat','gimagereader','zimlib','assimp','coin-or-lemon','wxGTK3','sdformat','liblsl','faust','hugin','coin-or-Cgl','rtorrent','dssp','zhu3d','botan','podofo','xfce4-hardware-monitor-plugin','libffado','grive2','coin-or-Ipopt','jmtpfs','flare-engine','lhapdf','airinv','liborigin2','vdrift','xylib','libcdr','plasma-workspace','adevs','plee-the-bear','libkdtree++','votca-xtp','ginac','libdigidocpp','qt','mapnik','hyperrogue','antimony','qlandkartegt','zorba','gdb','orocos-kdl','libmwaw','cube','libfplll','rawtherapee','skyviewer','xsd','stdair','tarantool','wt','zeromq','votca-tools','par2cmdline','leveldb','vfrnav','ispc','kst','heaptrack','brial','wcm','muse','kf5-akonadi-server','yadex','lifeograph','scantailor','erlang-eleveldb','fmt','osmium-tool','libcmis','exiv2','ghemical','gwenview','scram','step','qt5-qtlocation','icedtea-web','stk','vxl','gimp-dbp','shiboken','fldigi','coin-or-Cbc','gxemul','mathicgb','orsa','dnsdist','wfmath','brewtarget','compat-wxGTK3-gtk2','gtatool','flann','simple-mtpfs','codeblocks','povray','qtwebkit','texstudio','sleuthkit','openoffice.org-d
iafilter','libndn-cxx','patchelf','qt5-qtwebkit','cvc4','gjs','figtoipe','dvgrab','adobe-source-libraries','galera','sockperf','pgRouting','ceph','community-mysql','mecab','kf5-kactivities-stats','libwpd','erlang-basho_metrics','libodb','libjson-rpc-cpp','vdr-vnsiserver','guitarix','gmsh','gqrx','cryptominisat4','openCOLLADA','coin-or-FlopC++','ncrack','crrcsim','kdevelop','enigma','yosys','mpqc','libjingle','ocrad','mysql++','swig','dosbox','fityk','gnucap','xplanet','frepple','zeitgeist','beediff','extrema','coin-or-Bonmin','lldb','wxmacmolplt','kalarm','lilypond','stardict','gobby05','tapkee','qdigidoc','slic3r-prusa3d','coin-or-Bcps','slic3r','openigtlink','engrid','psi4','musique','lziprecover','qblade','voro++','pdfedit','libwpg','coin-or-Clp','pavucontrol','lld','indi-eqmod','openlierox','lshw','barry','gazebo','tomahawk','kismet','polyclipping','libQGLViewer','opencc','ceres-solver','vips','procinfo-ng','liborigin','eclib','chromaprint','libzypp','coin-or-Blis','owncloud-client','pcb2gcode','muParser','cxsc','task','xtide','fmit','mariadb','libint2','supertux','libabigail','krita','btbuilder','python-mapnik','freeorion','kicad','asgp','root','mingw-qt','supertuxkart','gammaray','warsow','warzone2100','sagemath','cross-gcc','lincity-ng','widelands','vtk','texlive','insight','marble-widget','zaz','marble-subsurface','percona-xtrabackup','polymake','blender','mscore','efl','marsshooter','seqan','mozjs45','thunderbird']

+ pkgs = ['libtorrent', 'ceres-solver', 'codeblocks', 'opencv', 'mmseq', 'zhu3d', 'libmediainfo', 'fawkes', 'fmt', 'step',

+         'mlpack', 'hyperrogue', 'root', 'vxl', 'repsnapper', 'tellico', 'pcl', 'avogadro2-libs', 'znc', 'purple-line',

+         'nload', 'jsoncpp', 'libepubgen', 'device-mapper-persistent-data', 'libgnomecanvasmm26', 'kig', 'coan',

+         'plotmm', 'fuse-emulator-utils', 'eris', 'harmonyseq', 'afflib', 'libopenraw', 'coin-or-Alps', 'ccgo', 'cfdg',

+         'linbox', 'recoll', 'votca-csg', 'flxmlrpc', 'coin-or-CoinUtils', 'stage', 'glogg', 'mongo-cxx-driver',

+         'libmediainfo', 'gsmartcontrol', 'ochusha', 'verilator', 'rb_libtorrent', 'rmol', 'exempi', 'pdns-recursor',

+         'gparted', 'ardour2', 'librime', 'mm3d', 'qt5-qtconnectivity', 'airtsp', 'normaliz', 'vulkan', 'kf5-kio',

+         'gpsim', 'monotone', 'mmapper', 'ember', 'qpdf', 'wireshark', 'mapserver', 'filezilla', 'extremetuxracer',

+         'mypaint', 'alliance', 'kstars', 'qgis', 'ardour5', 'opencv', 'qcad', 'passenger', 'calf', 'qt5-qtbase',

+         'kopete', 'digikam', 'nodejs-mapnik', 'stellarium', 'opencity', 'icecat', 'qm-vamp-plugins', 'pingus',

+         'abiword', 'goldendict', 'gtengine', 'condor', 'cloudy', 'healpix', 'latte-integrale', 'celestia', 'din',

+         'perl-Boost-Geometry-Utils', 'libpar2', 'libvisio', 'coin-or-Bcp', 'love', '3Depict', 'flrig', 'libpagemaker',

+         'pan', 'engauge-digitizer', 'libzmf', 'scorchwentbonkers', 'libfilezilla', 'v4l-utils', 'gdl', 'uhd',

+         'libfreehand', 'llvm3.9', 'sword', 'nextcloud-client', 'protobuf', 'crawl', 'libmspub', 'pynac', 'istatd',

+         'kblocks', 'libetonyek', 'kf5-knotifications', 'fawkes', 'minion', 'tcpflow', 'ignition-math', 'libwps', 'fcl',

+         'ergo', 'libpwiz', 'endless-sky', 'webkitgtk4', 'apitrace', 'libghemical', 'coin-or-OS', 'binutils',

+         'coin-or-Dip', 'adanaxisgpl', 'extundelete', 'synfig', 'stxxl', 'libminc', 'cairomm', 'amftools',

+         'mingw-qt5-qtbase', 'gnuplot', 'tellico', 'rdfind', 'freefem++', 'rocs', 'kcachegrind', 'rosegarden4',

+         'libmp4v2', 'qesteidutil', 'qcustomplot', 'code-editor', 'rcsslogplayer', 'pulseview', 'libdxfrw', 'mlpack',

+         'worker', 'aseman-qt-tools', 'stockfish', 'massif-visualizer', 'cryptominisat', 'powertop', 'usbguard',

+         'thrift', 'mmseq', 'fparser', 'audacity', 'poedit', 'panoglview', 'sonic-visualiser', 'mathex', 'getdp',

+         'librealsense', 'onboard', 'libgltf', 'aria2', 'innoextract', 'fritzing', 'flamerobin', 'gambas3', 'openmsx',

+         'oxygen-gtk2', 'asymptote', 'repsnapper', 'openscad', 'givaro', 'ompl', 'bionetgen', 'xrootd', 'qt5-qt3d',

+         'coot', 'glob2', 'nyquist', 'avogadro2-libs', 'cmake', 'libcec', 'qwtplot3d', 'kdevplatform', 'xapian-core',

+         'zpaq', 'libstaroffice', 'link-grammar', 'synergy', 'dasher', 'vcftools', 'ldc', 'lnav', 'coin-or-Couenne',

+         'dwlocstat', 'gimagereader', 'zimlib', 'assimp', 'coin-or-lemon', 'wxGTK3', 'sdformat', 'liblsl', 'faust',

+         'hugin', 'coin-or-Cgl', 'rtorrent', 'dssp', 'zhu3d', 'botan', 'podofo', 'xfce4-hardware-monitor-plugin',

+         'libffado', 'grive2', 'coin-or-Ipopt', 'jmtpfs', 'flare-engine', 'lhapdf', 'airinv', 'liborigin2', 'vdrift',

+         'xylib', 'libcdr', 'plasma-workspace', 'adevs', 'plee-the-bear', 'libkdtree++', 'votca-xtp', 'ginac',

+         'libdigidocpp', 'qt', 'mapnik', 'hyperrogue', 'antimony', 'qlandkartegt', 'zorba', 'gdb', 'orocos-kdl',

+         'libmwaw', 'cube', 'libfplll', 'rawtherapee', 'skyviewer', 'xsd', 'stdair', 'tarantool', 'wt', 'zeromq',

+         'votca-tools', 'par2cmdline', 'leveldb', 'vfrnav', 'ispc', 'kst', 'heaptrack', 'brial', 'wcm', 'muse',

+         'kf5-akonadi-server', 'yadex', 'lifeograph', 'scantailor', 'erlang-eleveldb', 'fmt', 'osmium-tool', 'libcmis',

+         'exiv2', 'ghemical', 'gwenview', 'scram', 'step', 'qt5-qtlocation', 'icedtea-web', 'stk', 'vxl', 'gimp-dbp',

+         'shiboken', 'fldigi', 'coin-or-Cbc', 'gxemul', 'mathicgb', 'orsa', 'dnsdist', 'wfmath', 'brewtarget',

+         'compat-wxGTK3-gtk2', 'gtatool', 'flann', 'simple-mtpfs', 'codeblocks', 'povray', 'qtwebkit', 'texstudio',

+         'sleuthkit', 'openoffice.org-diafilter', 'libndn-cxx', 'patchelf', 'qt5-qtwebkit', 'cvc4', 'gjs', 'figtoipe',

+         'dvgrab', 'adobe-source-libraries', 'galera', 'sockperf', 'pgRouting', 'ceph', 'community-mysql', 'mecab',

+         'kf5-kactivities-stats', 'libwpd', 'erlang-basho_metrics', 'libodb', 'libjson-rpc-cpp', 'vdr-vnsiserver',

+         'guitarix', 'gmsh', 'gqrx', 'cryptominisat4', 'openCOLLADA', 'coin-or-FlopC++', 'ncrack', 'crrcsim', 'kdevelop',

+         'enigma', 'yosys', 'mpqc', 'libjingle', 'ocrad', 'mysql++', 'swig', 'dosbox', 'fityk', 'gnucap', 'xplanet',

+         'frepple', 'zeitgeist', 'beediff', 'extrema', 'coin-or-Bonmin', 'lldb', 'wxmacmolplt', 'kalarm', 'lilypond',

+         'stardict', 'gobby05', 'tapkee', 'qdigidoc', 'slic3r-prusa3d', 'coin-or-Bcps', 'slic3r', 'openigtlink',

+         'engrid', 'psi4', 'musique', 'lziprecover', 'qblade', 'voro++', 'pdfedit', 'libwpg', 'coin-or-Clp',

+         'pavucontrol', 'lld', 'indi-eqmod', 'openlierox', 'lshw', 'barry', 'gazebo', 'tomahawk', 'kismet',

+         'polyclipping', 'libQGLViewer', 'opencc', 'ceres-solver', 'vips', 'procinfo-ng', 'liborigin', 'eclib',

+         'chromaprint', 'libzypp', 'coin-or-Blis', 'owncloud-client', 'pcb2gcode', 'muParser', 'cxsc', 'task', 'xtide',

+         'fmit', 'mariadb', 'libint2', 'supertux', 'libabigail', 'krita', 'btbuilder', 'python-mapnik', 'freeorion',

+         'kicad', 'asgp', 'root', 'mingw-qt', 'supertuxkart', 'gammaray', 'warsow', 'warzone2100', 'sagemath',

+         'cross-gcc', 'lincity-ng', 'widelands', 'vtk', 'texlive', 'insight', 'marble-widget', 'zaz',

+         'marble-subsurface', 'percona-xtrabackup', 'polymake', 'blender', 'mscore', 'efl', 'marsshooter', 'seqan',

+         'mozjs45', 'thunderbird']
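
One thing the reflow makes visible: the list still carries duplicates from the original one-liner ('opencv', 'tellico', 'fmt', 'root', 'vxl' and several others appear twice), so those packages get processed twice. An order-preserving dedup would be cheap:

    seen = set()
    deduped = []
    for p in pkgs:
        if p not in seen:
            seen.add(p)
            deduped.append(p)
    pkgs = deduped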

  

  print('Checking %s packages...' % len(pkgs))

  
@@ -100,8 +153,8 @@ 

          continue

  

      # Check for a noautobuild file

-     #f os.path.exists(os.path.join(workdir, name, 'noautobuild')):

-         # Maintainer does not want us to auto build.

+     # if os.path.exists(os.path.join(workdir, name, 'noautobuild')):

+     # Maintainer does not want us to auto build.

      #   print('Skipping %s due to opt-out' % name)

      #   continue

  
@@ -112,8 +165,8 @@ 

      if masterhash == 0:

          sys.stderr.write('%s has no git hash.\n' % name)

          break

-  

-     gitcmd = ['git', 'rev-parse', 'origin/%s' % secondbuildtag ]

+ 

+     gitcmd = ['git', 'rev-parse', 'origin/%s' % secondbuildtag]

      print('getting git hash for %s' % secondbuildtag)

      secondhash = runmeoutput(gitcmd, 'git', name, enviro, cwd=os.path.join(workdir, name))

      if secondhash == 0:
@@ -128,7 +181,7 @@ 

  

          if branch == secondbuildtag:

              # switch branch

-             fedpkgcmd = ['fedpkg', 'switch-branch', secondbuildtag ]

+             fedpkgcmd = ['fedpkg', 'switch-branch', secondbuildtag]

              print('switching %s to %s' % (name, secondbuildtag))

              if runme(fedpkgcmd, 'fedpkg', name, enviro, cwd=os.path.join(workdir, name)):

                  continue
@@ -157,13 +210,13 @@ 

              commit = ['fedpkg', 'commit', '-p', '-m', comment]

              print('Committing changes for %s' % name)

              if runme(commit, 'commit', name, enviro,

-                          cwd=os.path.join(workdir, name)):

+                      cwd=os.path.join(workdir, name)):

                  continue

          else:

              gitmergecmd = ['git', 'merge', 'master']

              print("merging master into %s" % secondbuildtag)

              if runme(gitmergecmd, 'git', name, enviro,

-                          cwd=os.path.join(workdir, name)):

+                      cwd=os.path.join(workdir, name)):

                  continue

              # git push

              push = ['git', 'push']
@@ -175,5 +228,5 @@ 

          # build

          build = ['fedpkg', 'build', '--nowait', '--background', '--target', target]

          print('Building %s' % name)

-         runme(build, 'build', name, enviro, 

+         runme(build, 'build', name, enviro,

                cwd=os.path.join(workdir, name))

file modified
+8 -7
@@ -47,9 +47,10 @@ 

          return 1

      return 0

  

+ 

  # This function needs a dry-run like option

  def runmeoutput(cmd, action, pkg, env, cwd=workdir):

-     """Simple function to run a command and return output if successful. 

+     """Simple function to run a command and return output if successful.

         cmd is a list of the command and arguments, action is a

         name for the action (for logging), pkg is the name of the package

         being operated on, env is the environment dict, and cwd is where
@@ -99,7 +100,7 @@ 

      for build in builds:

          try:

              buildtarget = kojisession.getTaskInfo(build['task_id'],

-                                        request=True)['request'][1]

+                                                   request=True)['request'][1]

              if buildtarget == massrebuild['target'] or buildtarget in massrebuild['targets']:

                  # We've already got an attempt made, skip.

                  newbuild = True
@@ -152,29 +153,29 @@ 

      set_mail = ['git', 'config', 'user.email', 'releng@fedoraproject.org']

      print('Setting git user.name and user.email')

      if runme(set_name, 'set_name', name, enviro,

-                  cwd=os.path.join(workdir, name)):

+              cwd=os.path.join(workdir, name)):

          continue

      if runme(set_mail, 'set_mail', name, enviro,

-                  cwd=os.path.join(workdir, name)):

+              cwd=os.path.join(workdir, name)):

          continue

  

      # git commit

      commit = ['fedpkg', 'commit', '-s', '-p', '-m', comment]

      print('Committing changes for %s' % name)

      if runme(commit, 'commit', name, enviro,

-                  cwd=os.path.join(workdir, name)):

+              cwd=os.path.join(workdir, name)):

          continue

  

      # get git url

      urlcmd = ['fedpkg', 'giturl']

      print('Getting git url for %s' % name)

      url = runmeoutput(urlcmd, 'giturl', name, enviro,

-                  cwd=os.path.join(workdir, name))

+                       cwd=os.path.join(workdir, name))

      if not url:

          continue

  

      # build

      build = [koji_bin, 'build', '--nowait', '--background', massrebuild['target'], url]

      print('Building %s' % name)

-     runme(build, 'build', name, enviro, 

+     runme(build, 'build', name, enviro,

            cwd=os.path.join(workdir, name))
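
As an aside, the `request=True` lookup above is what recovers the original build target from koji; in isolation (the task id is hypothetical):

    # with request=True koji includes the original task request; for a
    # build task, element 1 of the request is the target name
    info = kojisession.getTaskInfo(12345678, request=True)
    buildtarget = info['request'][1]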

file modified
+10 -10
@@ -18,13 +18,13 @@ 

  # Set some variables

  # Some of these could arguably be passed in as args.

  parser = argparse.ArgumentParser()

- parser.add_argument('-t','--target', help='Tag to tag the builds into',required=True)

- parser.add_argument('-s','--source',help='Tag holding the builds', required=True)

+ parser.add_argument('-t', '--target', help='Tag to tag the builds into', required=True)

+ parser.add_argument('-s', '--source', help='Tag holding the builds', required=True)

  args = parser.parse_args()

- target = args.target # tag to tag into

- holdingtag = args.source # tag holding the rebuilds

- newbuilds = {} # dict of packages that have a newer build attempt

- tasks = {} # dict of new build task info

+ target = args.target  # tag to tag into

+ holdingtag = args.source  # tag holding the rebuilds

+ newbuilds = {}  # dict of packages that have a newer build attempt

+ tasks = {}  # dict of new build task info

  

  # Create a koji session

  koji_module = koji.get_profile_module("fedora")
@@ -51,7 +51,7 @@ 

  

  results = kojisession.multiCall()

  

- for build, [result] in zip (builds, results):

+ for build, [result] in zip(builds, results):

      build['task_creation_time'] = result['create_time']

  

  # Use multicall
@@ -102,8 +102,9 @@ 

          for newbuild in newbuilds[build['package_name']]:

              # Scrape the task info out of the tasks dict from the newbuild task ID

              try:

-                 if tasks[newbuild['task_id']]['request'][1] in (target, '%s-candidate' % target, 'rawhide', 'dist-rawhide') \

-                 and newbuild['state'] == 1:

+                 if tasks[newbuild['task_id']]['request'][1] in (

+                         target, '%s-candidate' % target, 'rawhide', 'dist-rawhide') \

+                         and newbuild['state'] == 1:

                      print('Newer build found for %s.' % build['package_name'])

                      newer = True

                      break
@@ -124,4 +125,3 @@ 

  print('Tagging %s builds.' % pkgcount)

  results = kojisession.multiCall()

  print('Tagged %s builds.' % len(taglist))

- 
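
For reference, the `zip(builds, results)` pairing above relies on koji's multicall batching; a sketch of the pattern as used in this script:

    kojisession.multicall = True
    for build in builds:
        kojisession.getTaskInfo(build['task_id'], request=True)
    results = kojisession.multiCall()
    # each successful result is a one-element list, hence the [result] unpack
    for build, [result] in zip(builds, results):
        build['task_creation_time'] = result['create_time']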

@@ -24,30 +24,29 @@ 

  from massrebuildsinfo import MASSREBUILDS

  

  rebuildid = 'f30'

- failures = {} # dict of owners to lists of packages that failed.

- failed = [] # raw list of failed packages

+ failures = {}  # dict of owners to lists of packages that failed.

+ failed = []  # raw list of failed packages

  

  bzurl = 'https://bugzilla.redhat.com'

  BZCLIENT = RHBugzilla(url="%s/xmlrpc.cgi" % bzurl,

                        user="releng@fedoraproject.org")

  

  DEFAULT_COMMENT = \

- """{component} failed to build from source in {product} {version}/f{rawhide_version}

- 

- https://koji.fedoraproject.org/koji/taskinfo?taskID={task_id}

- {extrainfo}

- 

- For details on the mass rebuild see:

- 

- {wikipage}

- Please fix {component} at your earliest convenience and set the bug's status to

- ASSIGNED when you start fixing it. If the bug remains in NEW state for 8 weeks,

- {component} will be orphaned. Before branching of {product} {nextversion},

- {component} will be retired, if it still fails to build.

+     """{component} failed to build from source in {product} {version}/f{rawhide_version}

+     https://koji.fedoraproject.org/koji/taskinfo?taskID={task_id}

+     {extrainfo}

+ 

+     For details on the mass rebuild see:

+ 

+     {wikipage}

+     Please fix {component} at your earliest convenience and set the bug's status to

+     ASSIGNED when you start fixing it. If the bug remains in NEW state for 8 weeks,

+     {component} will be orphaned. Before branching of {product} {nextversion},

+     {component} will be retired, if it still fails to build.

+     For more details on the FTBFS policy, please visit:

+     https://fedoraproject.org/wiki/Fails_to_build_from_source

+     """

  

- For more details on the FTBFS policy, please visit:

- https://fedoraproject.org/wiki/Fails_to_build_from_source

- """

  

  def report_failure(massrebuild, component, task_id, logs,

                     summary="{component}: FTBFS in {product} {version}/f{rawhide_version}",
@@ -85,8 +84,7 @@ 

              'op_sys': 'Unspecified',

              'bug_file_loc': '',

              'priority': 'unspecified',

-            }

- 

+             }

  

      try:

          print('Creating the bug report')
@@ -96,11 +94,11 @@ 

          attach_logs(bug, logs)

      except Fault as ex:

          print(ex)

-         #Because of having image build requirement of having the image name in koji

-         #as a package name, they are missing in the components of koji and we need

-         #to skip them.

+         # Image builds are required to use the image name as a koji package

+         # name, so those names are missing from koji's components and we

+         # need to skip them.

          if ex.faultCode == 51:

-         #if "There is no component" in ex.faultString:

+             # if "There is no component" in ex.faultString:

              print(ex.faultString)

              return None

          else:
@@ -111,8 +109,8 @@ 

                                    comment)

      return bug

  

- def attach_logs(bug, logs):

  

+ def attach_logs(bug, logs):

      if isinstance(bug, int):

          bug = BZCLIENT.getbug(bug)

  
@@ -121,8 +119,8 @@ 

          try:

              response = urllib2.urlopen(log)

          except urllib2.HTTPError, e:

-             #sometimes there wont be any logs attached to the task.

-             #skip attaching logs for those tasks

+             # Sometimes there won't be any logs attached to the task;

+             # skip attaching logs for those tasks.

              if e.code == 404:

                  print("Failed to attach {} log".format(name))

                  continue
@@ -156,13 +154,14 @@ 

                  bug.id, fp, name, content_type='text/plain', file_name=name,

                  comment=comment

              )

-         except Fault as  ex:

+         except Fault as ex:

              print(ex)

              raise

  

          finally:

              fp.close()

  

+ 

  def get_filed_bugs(tracking_bug):

      """Query bugzilla if given bug has already been filed

  
@@ -180,6 +179,7 @@ 

  

      return bzclient.query(query_data)

  

+ 

  def get_task_failed(kojisession, task_id):

      ''' For a given task_id, use the provided kojisession to return the

      task_id of the first children that failed to build.
@@ -218,7 +218,6 @@ 

              state_log = log_url + "state.log"

              logs = [build_log, root_log, state_log]

  

- 

          if component not in filed_bugs_components:

              print("Filing bug for %s" % component)

              report_failure(massrebuild, component, task_id, logs)

@@ -24,8 +24,8 @@ 

  cachefile = "/var/tmp/find-unsigned-modules-cache.dbm"

  cache = dogpile.cache.make_region().configure(

      'dogpile.cache.dbm',

-      expiration_time=600,

-      arguments={"filename": cachefile},

+     expiration_time=600,

+     arguments={"filename": cachefile},

  )
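
The region configured above is normally consumed through its decorator; a minimal sketch, where the helper names are hypothetical:

    @cache.cache_on_arguments()
    def fetch_modules(tag):
        # the result is memoized in the dbm file for expiration_time
        # seconds, keyed on the function arguments
        return expensive_pdc_lookup(tag)  # hypothetical helper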

  

  
@@ -95,7 +95,7 @@ 

      for module in modules:

          if module['signed']:

              print("+", module['variant_uid'], "with tag", module['koji_tag'],

-                 "is signed with", ', '.join(module['signed']))

+                   "is signed with", ', '.join(module['signed']))

          else:

-             print("-", module['variant_uid'], "with tag", module['koji_tag'],\

-                 "is not signed.")

+             print("-", module['variant_uid'], "with tag", module['koji_tag'],

+                   "is not signed.")

file modified
+10 -11
@@ -19,23 +19,23 @@ 

  

  # Set some variables

  # Some of these could arguably be passed in as args.

- buildtag = 'f30-rebuild' # tag(s) to check

+ buildtag = 'f30-rebuild'  # tag(s) to check

  target = 'f30'

  updates = 'f30-candidate'

- rawhide = 'rawhide' # Change to dist-f13 after we branch

- epoch = '2019-01-31 10:10:00.000000' # rebuild anything not built after this date

- tobuild = {} # dict of owners to lists of packages needing to be built

- unbuilt = [] # raw list of unbuilt packages

+ rawhide = 'rawhide'  # Change to dist-f13 after we branch

+ epoch = '2019-01-31 10:10:00.000000'  # rebuild anything not built after this date

+ tobuild = {}  # dict of owners to lists of packages needing to be built

+ unbuilt = []  # raw list of unbuilt packages

  newbuilds = {}

  tasks = {}

  # List of Kojihubs to be searched

  kojihubs = [

- 'https://koji.fedoraproject.org/kojihub',

+     'https://koji.fedoraproject.org/kojihub',

  ]

  

- #'http://sparc.koji.fedoraproject.org/kojihub',

- def needRebuild(kojihub):

  

+ # 'http://sparc.koji.fedoraproject.org/kojihub',

+ def needRebuild(kojihub):

      # Create a koji session

      kojisession = koji.ClientSession(kojihub)

  
@@ -73,10 +73,9 @@ 

          print("Failed to get the build request information: %s (skipping)" % kojihub, file=sys.stderr)

          return -1

  

- 

      # Populate the task info dict

      for request in requests:

-         if len(request) > 1 or request[0] == None:

+         if len(request) > 1 or request[0] is None:

              continue

          tasks[request[0]['id']] = request[0]

  
@@ -94,6 +93,7 @@ 

              unbuiltnew.append(pkg['package_name'])

      return set(unbuiltnew)

  

+ 

  now = datetime.datetime.now()

  now_str = "%s UTC" % str(now.utcnow())

  print('<html><head>')
@@ -123,7 +123,6 @@ 

      if len(tobuild[owner]) == 0:

          del tobuild[owner]

  

- 

  print("<p>%s packages need rebuilding:</p><hr/>" % len(unbuilt))

  

  # Print the results

@@ -11,16 +11,15 @@ 

  import argparse

  

  parser = argparse.ArgumentParser()

- parser.add_argument('token', help='PDC token for authentication.')                                                

- parser.add_argument('branch', help='Name of the branch (f26, or 1.12, or master)')                                

- parser.add_argument('eol', help='End of life date for the SLAs, '                                                 

- 'in the format of "2020-01-01".')

- parser.add_argument('stg', help='Use Staging PDC', default = False)

+ parser.add_argument('token', help='PDC token for authentication.')

+ parser.add_argument('branch', help='Name of the branch (f26, or 1.12, or master)')

+ parser.add_argument('eol', help='End of life date for the SLAs, in the format of "2020-01-01".')

+ parser.add_argument('stg', help='Use Staging PDC', default=False)
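
One thing pycodestyle will not flag here: `stg` is a positional argument, so it is always required and `default=False` never applies; any value passed (even the string "False") is truthy. If an optional staging switch was intended, the usual shape is a flag; a sketch:

    parser.add_argument('--stg', action='store_true', default=False,
                        help='Use Staging PDC')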

  

  args = parser.parse_args()

  

  if __name__ == '__main__':

-     token = args.token 

+     token = args.token

      eol_date = args.eol

      branch = args.branch

      if args.stg:
@@ -28,7 +27,7 @@ 

      else:

          pdc = 'https://pdc.fedoraproject.org/'

      sla_ids = set()

-     url = '{0}/rest_api/v1/component-branch-slas/?page_size=100&branch={1}'.format(pdc,branch)

+     url = '{0}/rest_api/v1/component-branch-slas/?page_size=100&branch={1}'.format(pdc, branch)

      while True:

          rv = requests.get(url)

          if not rv.ok:
@@ -42,9 +41,8 @@ 

      eol_data = json.dumps({'eol': eol_date})

      headers = {'Content-Type': 'application/json', 'Authorization': 'token {0}'.format(token)}

      for sla_id in sla_ids:

-         sla_url = '{0}/rest_api/v1/component-branch-slas/{1}/'.format(pdc,sla_id)

+         sla_url = '{0}/rest_api/v1/component-branch-slas/{1}/'.format(pdc, sla_id)

          print(sla_url)

          rv = requests.patch(sla_url, headers=headers, data=eol_data)

          if not rv.ok:

              raise RuntimeError('Failed with: {0}'.format(rv.text))

- 

@@ -57,8 +57,8 @@ 

          package = branch['global_component']

          package_type = branch['type']

          critpath = branch['critical_path']

-         #Skip the packages that start with 'rust-'

-         #https://pagure.io/fesco/issue/2068

+         # Skip the packages that start with 'rust-'

+         # https://pagure.io/fesco/issue/2068

          if not package.startswith('rust-'):

              active_components.add(

                  '%s/%s' % (package_type, package))

@@ -39,7 +39,6 @@ 

  parser.add_argument('namespace', help='PkgDB namespace, e.g.: rpms, modules, container')

  args = parser.parse_args()

  

- 

  # These are old, and I don't know what to do with them.  When was their EOL?

  ignored_branches = [

      'RHL-9',
@@ -69,7 +68,7 @@ 

          if tries >= 5:

              raise IOError("Tried 5 times.  Giving up.")

          print("  ! Failed, %r, %i times.  Trying again." % (response, tries))

-         return _pkgdb_data_by_page(page, namespace, tries+1)

+         return _pkgdb_data_by_page(page, namespace, tries + 1)

      print("  pkgdb query took %r seconds" % (time.time() - start))

      data = response.json()

  
@@ -137,6 +136,7 @@ 

      }

      return lookup[branchname]

  

+ 

  def get_implicit_eol(branchname):

      lookup = {

          'master': '2222-01-01',  # THE FUTURE!
@@ -177,6 +177,7 @@ 

      }

      return lookup[branchname]

  

+ 

  def lookup_component_type(pkgdb_type):

      lookup = {

          'rpms': 'rpm',
@@ -184,7 +185,7 @@ 

          'container': 'container',

      }

      # Just to be verbose for users...

-     if not pkgdb_type in lookup:

+     if pkgdb_type not in lookup:

          raise KeyError("%r not in %r" % (pkgdb_type, lookup.keys()))

      return lookup[pkgdb_type]

  
@@ -218,6 +219,7 @@ 

              force=True,

          )

  

+ 

  if __name__ == '__main__':

      # Make sure the given namespace is a real namespace.

      lookup_component_type(args.namespace)
@@ -227,6 +229,7 @@ 

  

      # Set up N workers to pull work from a queue of pkgdb entries

      N = 10

+ 

      def pull_work():

          while True:

              print("Worker found %i items on the queue" % q.qsize())
@@ -235,6 +238,7 @@ 

                  print("Worker found StopIteration.  Shutting down.")

                  break

              do_work(entry)

+ 

      workers = [threading.Thread(target=pull_work) for i in range(N)]

      for worker in workers:

          worker.start()

@@ -78,7 +78,6 @@ 

          print("Did not apply any slas to %r (critpath %r)" % (base, critpath))

  

  

- 

  def patch_eol(pdc, package, eol, branch, type, force):

      specified_eol = eol

      endpoint = pdc['component-branch-slas']

file modified
+7 -8
@@ -24,26 +24,25 @@ 

  # that koji would use.  We should get this from sigul somehow.

  

  # Create a parser to parse our arguments

- parser = argparse.ArgumentParser(usage = '%(prog)s [options] tag')

+ parser = argparse.ArgumentParser(usage='%(prog)s [options] tag')

  parser.add_argument('-v', '--verbose', action='count', default=0,

-                   help='Be verbose, specify twice for debug')

+                     help='Be verbose, specify twice for debug')

  parser.add_argument('-n', '--dry-run', action='store_true', default=False,

-                   help='Perform a dry run without untagging')

+                     help='Perform a dry run without untagging')

  parser.add_argument('-p', '--koji-profile', default="fedora",

-                   help='Select a koji profile to use')

+                     help='Select a koji profile to use')

  

- KOJIHUB = args.koji_profile

  # Get our options and arguments

- args, extras =  parser.parse_known_args()

+ args, extras = parser.parse_known_args()

+ KOJIHUB = args.koji_profile

  

  if args.verbose <= 0:

      loglevel = logging.WARNING

  elif args.verbose == 1:

-     loglevel = logging.INFO 

- else: # options.verbose >= 2

+     loglevel = logging.INFO

+ else:  # options.verbose >= 2

      loglevel = logging.DEBUG

  

- 

  logging.basicConfig(format='%(levelname)s: %(message)s',

                      level=loglevel)

  

file modified
+30 -28
@@ -52,7 +52,7 @@ 

  

  # Possible image types available for Atomic Host

  IMAGE_TYPES = ['qcow2', 'raw-xz', 'vagrant-libvirt',

-                  'vagrant-virtualbox', 'dvd-ostree']

+                'vagrant-virtualbox', 'dvd-ostree']

  

  # Mapping for image types to keep it consistent with twoweek fedmsg sent in past

  IMAGE_TYPES_MAPPING = {
@@ -66,9 +66,9 @@ 

  PREVIOUS_MAJOR_RELEASE_FINAL_COMMITS = {

      'aarch64': None,

      'ppc64le': None,

-     'x86_64':  None,

+     'x86_64': None,

  }

- TARGET_REF = "fedora/%s/%s/atomic-host" # example fedora/27/x86_64/atomic-host

+ TARGET_REF = "fedora/%s/%s/atomic-host"  # example fedora/27/x86_64/atomic-host

  DEFAULT_COMPOSE_BASEDIR = "/mnt/koji/compose/updates/"

  

  # FIXME ???? Do we need a real SMTP server here?
@@ -105,7 +105,6 @@ 

  # url: https://apps.fedoraproject.org/datagrepper/raw?topic=org.fedoraproject.prod.pungi.compose.ostree

  DATAGREPPER_OSTREE_TOPIC = "org.fedoraproject.prod.pungi.compose.ostree"

  

- 

  SIGUL_SIGNED_TXT_PATH = "/tmp/signed"

  

  # Number of atomic testing composes to keep around
@@ -139,7 +138,7 @@ 

      # Start with page 1 response from datagrepper, grab the raw messages

      # and then continue to populate the list with the rest of the pages of data

      ostree_data = r.json()[u'raw_messages']

-     for rpage in range(2, r.json()[u'pages']+1):

+     for rpage in range(2, r.json()[u'pages'] + 1):

          ostree_data += requests.get(

              datagrepper_url,

              params=dict(page=rpage, **request_params)
@@ -147,8 +146,7 @@ 

  

      ostree_composes = [

          compose[u'msg'] for compose in ostree_data

-         if ostree_pungi_compose_id in compose[u'msg'][u'compose_id']

-             and 'atomic-host' in compose[u'msg'][u'ref']

+         if ostree_pungi_compose_id in compose[u'msg'][u'compose_id'] and 'atomic-host' in compose[u'msg'][u'ref']

      ]

  

      ostree_compose_info = dict()
@@ -160,8 +158,8 @@ 

  

      return ostree_compose_info

  

- def get_release_artifacts_info_from_compose(pungi_compose_id):

  

+ def get_release_artifacts_info_from_compose(pungi_compose_id):

      """

      :param pungi_compose_id: Pungi Compose ID considered for twoweek release

      :return: dict - Returns images detail for all supported arches for given
@@ -235,7 +233,7 @@ 

                  if image_detail['type'] in IMAGE_TYPES:

                      image_url = os.path.join(ATOMIC_HOST_STABLE_BASEDIR,

                                               compose_id,

-                                              image_detail[u'path'] )

+                                              image_detail[u'path'])

                      image_name = image_detail[u'path'].split('/')[-1]

                      name = image_name.split('.' + arch)[0]

                      release_artifact_info[IMAGE_TYPES_MAPPING[image_detail[u'type']]] = {
@@ -254,6 +252,7 @@ 

  

      return release_artifacts_info

  

+ 

  def send_atomic_announce_email(

          email_filelist,

          ostree_commit_data,
@@ -281,7 +280,7 @@ 

      released_artifacts.sort()

      released_checksums.sort()

  

-     commits_string =""

+     commits_string = ""

      for arch in ARCHES:

          commit = ostree_commit_data[arch]['commit']

          commits_string += "Commit(%s): %s\n" % (arch, commit)
@@ -290,7 +289,7 @@ 

      msg['To'] = "; ".join(mail_receivers)

      msg['From'] = "noreply@fedoraproject.org"

      msg['Subject'] = "Fedora Atomic Host Two Week Release Announcement: %s" % \

-                          ostree_commit_data.items()[0][1]['version']

+                      ostree_commit_data.items()[0][1]['version']

      msg.attach(

          MIMEText(

              """
@@ -381,6 +380,7 @@ 

      except smtplib.SMTPException as e:

          print("ERROR: Unable to send email:\n{}\n".format(e))

  

+ 

  def stage_atomic_release(

          pungi_compose_id,

          compose_basedir=DEFAULT_COMPOSE_BASEDIR,
@@ -413,6 +413,7 @@ 

          )

          exit(3)

  

+ 

  def sign_checksum_files(

          key,

          artifact_path,
@@ -472,7 +473,7 @@ 

              )

  

          if subprocess.call(

-             "chgrp releng-team {}".format(signed_txt_path).split()

+                 "chgrp releng-team {}".format(signed_txt_path).split()

          ):

              log.error(

                  "sign_checksum_files: chgrp releng-team {}".format(
@@ -482,7 +483,7 @@ 

              sys.exit(3)

  

          if subprocess.call(

-             "chmod 664 {}".format(signed_txt_path).split()

+                 "chmod 664 {}".format(signed_txt_path).split()

          ):

              log.error(

                  "sign_checksum_files: chmod 644 {}".format(
@@ -493,8 +494,8 @@ 

  

          # FIXME - need sudo until new pungi perms are sorted out

          if subprocess.call(

-             #["sg", "releng-team", "'mv {} {}'".format(signed_txt_path, cfile)]

-             "sudo mv {} {}".format(signed_txt_path, cfile).split()

+                 # ["sg", "releng-team", "'mv {} {}'".format(signed_txt_path, cfile)]

+                 "sudo mv {} {}".format(signed_txt_path, cfile).split()

          ):

              log.error(

                  "sign_checksum_files: sudo sg releng-team 'mv {} {}' FAILED".format(
@@ -551,11 +552,11 @@ 

          prune_candidate_dirs.reverse()

  

          for candidate_dir in prune_candidate_dirs[0:prune_limit]:

-             #try:

+             # try:

              #    shutil.rmtree(

              #        os.path.join(prune_base_dir, candidate_dir)

              #    )

-             #except OSError, e:

+             # except OSError, e:

              #    log.error(

              #        "Error trying to remove directory: {}\n{}".format(

              #            candidate_dir,
@@ -563,7 +564,7 @@ 

              #        )

              #    )

  

-             #FIXME - need to do this with sudo until pungi perms are fixed

+             # FIXME - need to do this with sudo until pungi perms are fixed

              prune_cmd = "sudo rm -fr {}".format(

                  os.path.join(

                      prune_base_dir,
@@ -575,6 +576,7 @@ 

                      "prune_old_composes: command failed: {}".format(prune_cmd)

                  )

  

+ 

  def generate_static_delta(old_commit, new_commit):

      """

      generate_static_delta
@@ -593,6 +595,7 @@ 

          log.error("generate_static_delta: diff generation failed: %s", diff_cmd)

          exit(3)

  

+ 

  def update_ostree_summary_file():

      """

      update_ostree_summary_file
@@ -609,6 +612,7 @@ 

          log.error("update_ostree_summary_file: update failed: %s", summary_cmd)

          exit(3)

  

+ 

  def update_ref(ref, old_commit, new_commit):

      """

      update_ref
@@ -623,11 +627,11 @@ 

      if old_commit == new_commit:

          log.info("ref %s is already at %s. Skipping update",

                   ref, new_commit

-         )

+                  )

          return

  

      log.info("Moving ref %s from %s => %s",

-               ref, old_commit, new_commit)

+              ref, old_commit, new_commit)

  

      reset_cmd = ['/usr/bin/sudo', 'ostree', 'reset', ref,

                   new_commit, '--repo', ATOMIC_REPO]
@@ -636,8 +640,6 @@ 

          sys.exit(3)

  

  

- 

- 

  if __name__ == '__main__':

  

      # get args from command line
@@ -701,10 +703,10 @@ 

      else:

          compose_basedir = DEFAULT_COMPOSE_BASEDIR

  

-     log.info("Fetching images information for Compose ID %s", pargs.pungi_compose_id )

+     log.info("Fetching images information for Compose ID %s", pargs.pungi_compose_id)

      # Get image artifacts information for given Pungi Compose ID

      release_artifacts_info = get_release_artifacts_info_from_compose(

-                                 pargs.pungi_compose_id)

+         pargs.pungi_compose_id)

      log.info("{}\n{}".format("RELEASE_ARTIFACTS_INFO", json.dumps(release_artifacts_info, indent=2)))

  

      log.info("Fetching images information from compose ID %s complete", pargs.pungi_compose_id)
@@ -735,7 +737,7 @@ 

      for arch in ARCHES:

          if arch not in ostree_compose_info.keys():

              log.error("No compose commit info for %s in %s",

-                         arch, pargs.ostree_pungi_compose_id)

+                       arch, pargs.ostree_pungi_compose_id)

              sys.exit(2)

  

      # populate the ostree_commit_data dict
@@ -849,15 +851,15 @@ 

      # On the first release send only to "FIRST_RELEASE" list

      if pargs.first_release:

          mail_receivers = ATOMIC_HOST_FIRST_RELEASE_MAIL_RECIPIENTS

-     else: 

+     else:

          mail_receivers = ATOMIC_HOST_EMAIL_RECIPIENTS

      send_atomic_announce_email(set(email_filelist),

                                 ostree_commit_data,

                                 mail_receivers=mail_receivers)

  

      # FIXME - The logic in this function is broken, leave it disabled for now

-     #log.info("Pruning old Atomic Host test composes")

-     #prune_old_composes(ATOMIC_HOST_STABLE_BASEDIR, 2)

+     # log.info("Pruning old Atomic Host test composes")

+     # prune_old_composes(ATOMIC_HOST_STABLE_BASEDIR, 2)

  

      log.info("Two Week Atomic Host Release Complete!")

  

file modified
+53 -50
@@ -16,22 +16,21 @@ 

  import sys

  import re

  

- 

  CANONARCHES = ['arm', 'ppc', 's390']

  ARCHES = {'arm': ['arm', 'armhfp', 'aarch64'],

            'ppc': ['ppc', 'ppc64', 'ppc64le'],

            's390': ['s390', 's390x']}

- #TARGETPATH = '/srv/pub/fedora-secondary/test/'

+ # TARGETPATH = '/srv/pub/fedora-secondary/test/'

  TARGETPATH = '/srv/pub/fedora-secondary/'

  

  

- 

  # Setup our logger

  # Null logger to avoid spurious messages, add a handler in app code

  class NullHandler(logging.Handler):

      def emit(self, record):

          pass

  

+ 

  h = NullHandler()

  

  # This is our log object,
@@ -50,7 +49,7 @@ 

          # These direct us to talk to a fedmsg-relay living somewhere.

          active=True,

          name="relay_inbound",

-         )

+     )

      fedmsg.publish(**kwargs)

  

  
@@ -75,7 +74,7 @@ 

      if env:

          for item in env.keys():

              log.debug('Adding %s:%s to the environment' %

-                            (item, env[item]))

+                       (item, env[item]))

              environ[item] = env[item]

      # Check if we're supposed to be on a shell.  If so, the command must

      # be a string, and not a list.
@@ -88,10 +87,10 @@ 

      if sys.stdout.isatty():

          if pipe:

              log.debug('Running %s | %s directly on the tty' %

-                            (' '.join(cmd), ' '.join(pipe)))

+                       (' '.join(cmd), ' '.join(pipe)))

          else:

              log.debug('Running %s directly on the tty' %

-                             ' '.join(cmd))

+                       ' '.join(cmd))

          try:

              if pipe:

                  # We're piping the stderr over too, which is probably a
@@ -109,7 +108,7 @@ 

                                        cwd=cwd)

                  (output, err) = proc.communicate()

                  if proc.returncode:

-                    print('Non zero exit')

+                     print('Non zero exit')

              else:

                  proc = subprocess.Popen(command, env=environ,

                                          stdout=subprocess.PIPE,
@@ -129,10 +128,10 @@ 

          # Ok, we're not on a live tty, so pipe and log.

          if pipe:

              log.debug('Running %s | %s and logging output' %

-                           (' '.join(cmd), ' '.join(pipe)))

+                       (' '.join(cmd), ' '.join(pipe)))

          else:

              log.debug('Running %s and logging output' %

-                            ' '.join(cmd))

+                       ' '.join(cmd))

          try:

              if pipe:

                  proc1 = subprocess.Popen(command, env=environ,
@@ -141,10 +140,10 @@ 

                                           shell=shell,

                                           cwd=cwd)

                  proc = subprocess.Popen(pipecmd, env=environ,

-                                          stdin=proc1.stdout,

-                                          stdout=subprocess.PIPE,

-                                          stderr=subprocess.PIPE, shell=shell,

-                                          cwd=cwd)

+                                         stdin=proc1.stdout,

+                                         stdout=subprocess.PIPE,

+                                         stderr=subprocess.PIPE, shell=shell,

+                                         cwd=cwd)

                  output, error = proc.communicate()

              else:

                  proc = subprocess.Popen(command, env=environ,
@@ -157,9 +156,9 @@ 

          log.info(output)

          if proc.returncode:

              print('Command %s returned code %s with error: %s' %

-                               (' '.join(cmd),

-                                proc.returncode,

-                                error))

+                   (' '.join(cmd),

+                    proc.returncode,

+                    error))

      result = re.search('.*Literal data: (.*) bytes.*', output, re.MULTILINE)

      try:

          transferred = result.group(1)
@@ -173,6 +172,7 @@ 

  

      return (transferred, deleted)
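
The regex above scrapes the transferred byte count out of rsync's --stats output; in isolation:

    import re

    # rsync --stats emits a line such as: "Literal data: 4,096 bytes"
    output = "Literal data: 4,096 bytes"
    match = re.search('.*Literal data: (.*) bytes.*', output, re.MULTILINE)
    transferred = match.group(1)  # '4,096'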

  

+ 

  def syncArch(arch, repodata, fedmsg=False):

      '''

      Sync a binary rpm tree; pass in arch and True/False for syncing repodata or not
@@ -199,10 +199,12 @@ 

      else:

          print(cmd)

  

+ 

  def syncSRPM(srpm, arch, dest, source, fedmsg=False):

      if not os.path.isdir(dest):

          os.makedirs(dest)

-     cmd = ['rsync', '-avhH', 'rsync://%s.koji.fedoraproject.org/fedora-%s/%s/%s' % (arch, arch, source, srpm), '%s/%s' % (dest, srpm)]

+     cmd = ['rsync', '-avhH', 'rsync://%s.koji.fedoraproject.org/fedora-%s/%s/%s' % (arch, arch, source, srpm),

+            '%s/%s' % (dest, srpm)]

      log.debug(cmd)

      if not opts.only_show:

          transferred, deleted = _run_command(cmd)
@@ -212,27 +214,29 @@ 

      else:

          print(cmd)

  

+ 

  def getSRPM(rpmfilename):

      """ get the SRPM of a given RPM. """

      ts = rpm.TransactionSet()

-     ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES) 

+     ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)

      fd = os.open(rpmfilename, os.O_RDONLY)

      h = None

      try:

          h = ts.hdrFromFdno(fd)

      except rpm.error, e:

          if str(e) == "error reading package header ":

-            print("rpmfile: %s" % rpmfilename)

-            print(str(e))

-            return None

+             print("rpmfile: %s" % rpmfilename)

+             print(str(e))

+             return None

      os.close(fd)

-     if h == None:

-          print("Issues with rpm: %s" % rpmfilename)

-          return None

+     if h is None:

+         print("Issues with rpm: %s" % rpmfilename)

+         return None

      return h.sprintf('%{SOURCERPM}')

  

+ 

  def srpmLocation(base, package):

-     ''' 

+     '''

      Takes a base path for the source rpm.

      Returns the target path, the hub arch, and the path on the hub where the source can be found.

      '''
@@ -246,23 +250,24 @@ 

                      break

              break

  

-     if not (base.endswith(basearch) or base.endswith("%s/%s" % (basearch, package[1]))) :

+     if not (base.endswith(basearch) or base.endswith("%s/%s" % (basearch, package[1]))):

          target = base.replace('%s/os/Packages' % basearch, 'source/SRPMS')

      else:

          target = base.replace(basearch, 'SRPMS')

      source = target.replace(TARGETPATH, '', 1)

      return (hubarch, target, source)

  

+ 

  def main(opts):

      # Sync the given arch or all arches unless told to not sync

      if not opts.no_sync:

-          if opts.onlyarch:

-              syncArch(opts.onlyarch, False)

-              syncArch(opts.onlyarch, True, opts.fedmsg)

-          else:

-              for arch in CANONARCHES:

-                  syncArch(arch, False)

-                  syncArch(arch, True, opts.fedmsg)

+         if opts.onlyarch:

+             syncArch(opts.onlyarch, False)

+             syncArch(opts.onlyarch, True, opts.fedmsg)

+         else:

+             for arch in CANONARCHES:

+                 syncArch(arch, False)

+                 syncArch(arch, True, opts.fedmsg)

  

      # work out what SRPMS we need and what can be removed.

      srpms = {}
@@ -271,7 +276,7 @@ 

      to_delete_srpms = {}

      sourcerepo = []

      for root, dirs, files in os.walk(TARGETPATH):

-         if not root.find('/archive/') == -1 or not root.find('/.snapshot/') == -1 :

+         if '/archive/' in root or '/.snapshot/' in root:

              continue

          if root == []:

              continue
@@ -283,8 +288,8 @@ 

              sourcerepo.append(root)

          for name in files:

              if name.endswith('rpm') and not name.endswith('src.rpm'):

-                 srpmfile = getSRPM(os.path.join( root, name))

-                 if not srpmfile == None:

+                 srpmfile = getSRPM(os.path.join(root, name))

+                 if srpmfile is not None:

                      print("getting data for %s" % name)

                      srpms[srpmfile] = srpmLocation(root, name)

              if name.endswith('src.rpm'):
@@ -293,14 +298,13 @@ 

      for srpm in zip(srpms.keys(), srpms.values()):

          if srpm[0] not in existing_srpms.keys() and srpm[0] not in to_sync_srpms.keys():

              print("Need: %s  At: %s" % (srpm[0], srpm[1]))

-             to_sync_srpms[srpm[0]] = srpm[1] 

+             to_sync_srpms[srpm[0]] = srpm[1]

  

      for srpm in zip(existing_srpms.keys(), existing_srpms.values()):

          if srpm[0] not in srpms.keys() and srpm[0] not in to_delete_srpms.keys():

              print("To Delete: %s" % srpm[0])

              to_delete_srpms[srpm[0]] = srpm[1]

  

- 

      for files in zip(to_sync_srpms.keys(), to_sync_srpms.values()):

          print(files)

          srpm = files[0]
@@ -330,26 +334,25 @@ 

              if error:

                  if sys.stdout.isatty():

                      sys.stderr.write(error)

-                 #else:

-                     # Yes, we could wind up sending error output to stdout in the

-                     # case of no local tty, but I don't have a better way to do this.

-                     #self.log.info(error)

+                 # else:

+                 # Yes, we could wind up sending error output to stdout in the

+                 # case of no local tty, but I don't have a better way to do this.

+                 # self.log.info(error)

              if proc.returncode:

                  print("error making repodata %s", proc.returncode)

-         

+ 

  

  if __name__ == '__main__':

-     opt_p = argparse.ArgumentParser(usage = "%(prog)s [OPTIONS] ")

+     opt_p = argparse.ArgumentParser(usage="%(prog)s [OPTIONS] ")

      opt_p.add_argument('-a', '--arch', action='store', dest='onlyarch',

-                      default=False, help="sync only the given arch.")

+                        default=False, help="sync only the given arch.")

      opt_p.add_argument('-s', '--show', action='store_true', dest='only_show',

-                      default=False, help="Show what would be done but dont actually do it.")

+                        default=False, help="Show what would be done but don't actually do it.")

      opt_p.add_argument('-n', '--no-sync', action='store_true', dest='no_sync',

-                      default=False, help="Skip syncing new bits.")

+                        default=False, help="Skip syncing new bits.")

      opt_p.add_argument('--disable-fedmsg', dest="fedmsg", action="store_false",

-                      help="Disable fedmsg notifications", default=True)

+                        help="Disable fedmsg notifications", default=True)

  

      args, extras = opt_p.parse_known_args()

  

      main(args)

- 

@@ -120,7 +120,7 @@ 

          builds = [build['nvr'] for build in

                    self.kojisession.listTagged(tag, latest=True,

                                                inherit=inherit)

-                  ]

+                   ]

          return builds

  

      def get_build_ids(self, nvrs):
@@ -513,8 +513,7 @@ 

      def run_sigul(rpms, batchnr):

          global status

          logging.info('Signing batch %s/%s with %s rpms' % (

-             batchnr, (total + batchsize - 1) / batchsize, len(rpms))

-                     )

+             batchnr, (total + batchsize - 1) / batchsize, len(rpms)))

          command = sigul_helper.build_sign_cmdline(rpms)

          logging.debug('Running %s' % subprocess.list2cmdline(command))

          ret = sigul_helper.run_command(command, False)[0]

file modified
+2 -2
@@ -40,7 +40,7 @@ 

  for srpm in srpms:

      """Return the rpm header."""

      ts = rpm.TransactionSet()

-     ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES|rpm._RPMVSF_NODIGESTS)

+     ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES | rpm._RPMVSF_NODIGESTS)

      fo = file(str("%s" % (srpm)), "r")

      hdr = ts.hdrFromFdno(fo.fileno())

      fo.close()
@@ -63,6 +63,6 @@ 

  else:

      output = ""

      for pkg in pkglist:

-         output +=  pkg + " "

+         output += pkg + " "

  

      print(output)

file modified
+11 -9
@@ -1,6 +1,6 @@ 

  #!/usr/bin/python

  #

- # synd-blocked-primary.py - A utility to sync blocked packages in primary koji 

+ # synd-blocked-primary.py - A utility to sync blocked packages in primary koji

  #                           to a secondary arch for a given tag

  #

  # Copyright (C) 2011-2013 Red Hat, Inc.
@@ -18,15 +18,16 @@ 

  

  # Set some variables

  # Some of these could arguably be passed in as args.

- tags = ['f26', 'f25', 'f24', 'f23'] # tag to check in koji

+ tags = ['f26', 'f25', 'f24', 'f23']  # tag to check in koji

  

  arches = ['arm', 'ppc', 's390']

  

  koji_module = koji.get_profile_module("fedora")

  kojisession = koji_module.ClientSession(koji_module.config.server)

  

+ 

  def getBlocked(kojisession, tag):

-     blocked = [] # holding for blocked pkgs

+     blocked = []  # holding for blocked pkgs

      pkgs = kojisession.listPackages(tagID=tag)

      # Check the pkg list for blocked packages

      for pkg in pkgs:
@@ -34,8 +35,9 @@ 

              blocked.append(pkg['package_name'])

      return blocked

  

+ 

  def getUnBlocked(kojisession, tag):

-     unblocked = [] # holding for blocked pkgs

+     unblocked = []  # holding for blocked pkgs

      pkgs = kojisession.listPackages(tagID=tag)

      # Check the pkg list for blocked packages

      for pkg in pkgs:
@@ -43,6 +45,7 @@ 

              unblocked.append(pkg['package_name'])

      return unblocked

  

+ 

  for arch in arches:

      print("== Working on Arch: %s" % arch)

      # Create a koji session
@@ -52,7 +55,7 @@ 

  

      for tag in tags:

          print("=== Working on tag: %s" % tag)

-         secblocked = [] # holding for blocked pkgs

+         secblocked = []  # holding for blocked pkgs

          toblock = []

          unblock = []

  
@@ -65,17 +68,17 @@ 

              if pkg not in secblocked:

                  toblock.append(pkg)

                  print("need to block %s" % pkg)

-         

+ 

          for pkg in secblocked:

              if pkg not in priblocked:

                  unblock.append(pkg)

                  print("need to unblock %s" % pkg)

-         

+ 

          for pkg in priunblocked:

              if pkg not in secunblocked:

                  unblock.append(pkg)

                  print("need to unblock %s" % pkg)

-         

+ 

          seckojisession.multicall = True

          for pkg in toblock:

              print("Blocking: %s" % pkg)
@@ -85,7 +88,6 @@ 

              print("UnBlocking: %s" % pkg)

              seckojisession.packageListUnblock(tag, pkg)

  

- 

          listings = seckojisession.multiCall()

  

      seckojisession.logout()

file modified
+17 -13
@@ -1,8 +1,8 @@ 

  #!/usr/bin/python

  # -*- coding: utf-8 -*-

  #

- # synd-tagged-primary.py - A utility to sync tagged packages in primary koji 

- #                           to a secondary arch 

+ # synd-tagged-primary.py - A utility to sync tagged packages in primary koji

+ #                           to a secondary arch

  #

  # Copyright (C) 2012-2013 Red Hat, Inc.

  # SPDX-License-Identifier:      GPL-2.0+
@@ -39,27 +39,29 @@ 

  session_opts['krbservice'] = 'host'

  session_opts['krb_rdns'] = False

  

+ 

  def getTagged(kojisession, tag):

-     tagged = [] # holding for blocked pkgs

+     tagged = []  # holding for blocked pkgs

      pkgs = kojisession.listTagged(tag, latest=True)

      # Check the pkg list for blocked packages

-     #for pkg in pkgs:

+     # for pkg in pkgs:

      #    tagged.append({"name": pkg['name'], "nvr": pkg['nvr']})

-             

+ 

      #    print("tagged build %s" % pkg['nvr'])

      return pkgs

  

- def rpmvercmp ((e1, v1, r1), (e2, v2, r2)):

+ 

+ def rpmvercmp((e1, v1, r1), (e2, v2, r2)):

      """find out which build is newer"""

      rc = rpm.labelCompare((e1, v1, r1), (e2, v2, r2))

      if rc == 1:

-         #first evr wins

+         # first evr wins

          return 1

      elif rc == 0:

-         #same evr

+         # same evr

          return 0

      else:

-         #second evr wins

+         # second evr wins

          return -1

  

  
@@ -79,7 +81,7 @@ 

  

  for tag in args.tag:

      print("=== Working on tag: %s ====" % tag)

-     secblocked = [] # holding for blocked pkgs

+     secblocked = []  # holding for blocked pkgs

      totag = []

      tountag = []

      pripkgnvrs = []
@@ -97,7 +99,7 @@ 

          if pkg['nvr'] not in secpkgnvrs:

              secpkg = seckojisession.getBuild(pkg['nvr'])

              # see if we have the build on secondary koji and make sure its complete

-             if not secpkg is None and secpkg['state'] == 1 :

+             if secpkg is not None and secpkg['state'] == 1:

                  totag.append(pkg['nvr'])

                  print("need to tag %s" % pkg['nvr'])

  
@@ -109,10 +111,12 @@ 

                  # if the package only exists on secondary let it be

                  print("Secondary arch only package %s" % pkg['nvr'])

              # secondary arch evr is higher than primary untag ours

- 	    elif pripkg[0]['active'] == None:

+             elif pripkg[0]['active'] is None:

                  # get the latest build from primary in the tag

                  pripkg = kojisession.listTagged(tag, latest=True, package=pkg['name'])

-                 if pripkg == [] or rpmvercmp((str(pkg['epoch']), pkg['version'], pkg['release']),  (str(pripkg[0]['epoch']), pripkg[0]['version'], pripkg[0]['release'])) == 1:

+                 if pripkg == [] or rpmvercmp((str(pkg['epoch']), pkg['version'], pkg['release']),

+                                              (

+                                              str(pripkg[0]['epoch']), pripkg[0]['version'], pripkg[0]['release'])) == 1:

                      tountag.append(pkg['nvr'])

                      print("need to untag %s" % pkg['nvr'])

  

file modified
+10 -9
@@ -15,21 +15,22 @@ 

  

      @property

      def disabledOptions(self):

-         return ["W0105",           # String statement has no effect

-                 "W0110",           # map/filter on lambda could be replaced by comprehension

-                 "W0141",           # Used builtin function %r

-                 "W0142",           # Used * or ** magic

-                 "W0212",           # Access to a protected member of a client class

-                 "W0511",           # Used when a warning note as FIXME or XXX is detected.

-                 "W0603",           # Using the global statement

-                 "W0614",           # Unused import %s from wildcard import

-                 "I0011",           # Locally disabling %s

+         return ["W0105",  # String statement has no effect

+                 "W0110",  # map/filter on lambda could be replaced by comprehension

+                 "W0141",  # Used builtin function %r

+                 "W0142",  # Used * or ** magic

+                 "W0212",  # Access to a protected member of a client class

+                 "W0511",  # Used when a warning note as FIXME or XXX is detected.

+                 "W0603",  # Using the global statement

+                 "W0614",  # Unused import %s from wildcard import

+                 "I0011",  # Locally disabling %s

                  ]

  

      @property

      def ignoreNames(self):

          return {}

  

+ 

  if __name__ == "__main__":

      conf = FedoraRelengLintConfig()

      linter = PocketLinter(conf)

These are mostly formatting changes to make our code more PEP 8 compliant.

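For anyone who wants to reproduce this kind of check locally, here is a minimal sketch using pycodestyle's Python API (the file list and line length are illustrative, not something this PR enforces):

    # pep8_report.py - hypothetical helper: count PEP 8 violations
    import pycodestyle

    # pycodestyle defaults to 79 columns; 99 is only an example here
    style = pycodestyle.StyleGuide(max_line_length=99)
    report = style.check_files(["scripts/mass-rebuild.py", "scripts/mass-tag.py"])
    print("%d PEP 8 problems found" % report.total_errors)

The pycodestyle-3 command-line tool runs essentially the same check file by file.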
Looks good to me. We should probably look at adding a tox job that runs flake8 on https://jenkins-fedora-apps.apps.ci.centos.org/

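A minimal sketch of what that tox environment could look like, assuming flake8 as the linter (the env name, line length, and target paths are illustrative; skipsdist assumes the repo stays a plain collection of scripts with nothing to install):

    [tox]
    envlist = flake8
    skipsdist = true

    [testenv:flake8]
    deps = flake8
    commands = flake8 --max-line-length=99 scripts/ tests/

A Jenkins job would then only need to invoke "tox -e flake8" in a checkout.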
Pull-Request has been closed by humaton 2 years ago
Changes Summary (43 files changed)
+3 -3      Makefile
+3 -5      scripts/block_retired.py
+4 -7      scripts/branching/modulepkg.py
+3 -2      scripts/build-current.py
+43 -37    scripts/build-previous.py
+2 -0      scripts/check-latest-build.py
+5 -5      scripts/check-testing-pre-GA.py
+120 -101  scripts/check-unretirement.py
+24 -17    scripts/check-upgrade-paths.py
+26 -18    scripts/check_epel_deps.py
+8 -2      scripts/clean-overrides.py
+43 -33    scripts/critpath.py
+27 -17    scripts/fedora-torrent-ini.py
+3 -3      scripts/find-bad-builds.py
+31 -20    scripts/find-hidden-packages.py
+7 -7      scripts/find_failures.py
+5 -9      scripts/find_unblocked_orphans.py
+1 -2      scripts/isolate-tag.py
+12 -11    scripts/koji-build-srpm.py
+42 -40    scripts/koji-compare.py
+42 -34    scripts/koji-import.py
+23 -23    scripts/koji-reimport.py
+48 -49    scripts/koji-stalk.py
+12 -6     scripts/mass-rebuild-close-bugs.py
+49 -41    scripts/mass-rebuild-modules.py
+72 -19    scripts/mass-rebuild-special.py
+8 -7      scripts/mass-rebuild.py
+10 -10    scripts/mass-tag.py
+27 -28    scripts/mass_rebuild_file_bugs.py
+5 -5      scripts/mbs/find-unsigned-modules.py
+10 -11    scripts/need-rebuild.py
+7 -9      scripts/pdc/adjust-eol-all.py
+2 -2      scripts/pdc/create-new-release-branches.py
+7 -3      scripts/pdc/sync-branches-from-pkgdb.py
+0 -1      scripts/pdc/utilities.py
+7 -8      scripts/prune-tag.py
+30 -28    scripts/push-two-week-atomic.py
+53 -50    scripts/secondary-sync.py
+2 -3      scripts/sigulsign_unsigned.py
+2 -2      scripts/srpm-excluded-arch.py
+11 -9     scripts/sync-blocked-primary.py
+17 -13    scripts/sync-tagged-primary.py
+10 -9     tests/pylint/runpylint.py