| |
@@ -30,17 +30,23 @@
|
| |
import koji.util
|
| |
import koji.tasks
|
| |
import glob
|
| |
+ try:
|
| |
+ import json
|
| |
+ except ImportError: # pragma: no cover
|
| |
+ import simplejson as json
|
| |
import logging
|
| |
import logging.handlers
|
| |
from koji.daemon import incremental_upload, log_output, TaskManager, SCM
|
| |
from koji.tasks import ServerExit, ServerRestart, BaseTaskHandler, MultiPlatformTask
|
| |
from koji.util import parseStatus, isSuccess, dslice, dslice_ex
|
| |
+ import multilib.multilib as multilib
|
| |
import os
|
| |
import pwd
|
| |
import grp
|
| |
import random
|
| |
import re
|
| |
import rpm
|
| |
+ import rpmUtils.arch
|
| |
import shutil
|
| |
import signal
|
| |
import smtplib
|
| |
@@ -58,6 +64,8 @@
|
| |
from gzip import GzipFile
|
| |
from optparse import OptionParser, SUPPRESS_HELP
|
| |
from yum import repoMDObject
|
| |
+ import yum.packages
|
| |
+ import yum.Errors
|
| |
|
| |
#imports for LiveCD, LiveMedia, and Appliance handler
|
| |
image_enabled = False
|
| |
@@ -4831,8 +4839,8 @@
|
| |
if os.path.getsize(pkglist) == 0:
|
| |
pkglist = None
|
| |
self.create_local_repo(rinfo, arch, pkglist, groupdata, oldrepo)
|
| |
-
|
| |
- external_repos = self.session.getExternalRepoList(rinfo['tag_id'], event=rinfo['create_event'])
|
| |
+ external_repos = self.session.getExternalRepoList(
|
| |
+ rinfo['tag_id'], event=rinfo['create_event'])
|
| |
if external_repos:
|
| |
self.merge_repos(external_repos, arch, groupdata)
|
| |
elif pkglist is None:
|
| |
@@ -4845,10 +4853,9 @@
|
| |
for f in os.listdir(self.datadir):
|
| |
files.append(f)
|
| |
self.session.uploadWrapper('%s/%s' % (self.datadir, f), uploadpath, f)
|
| |
-
|
| |
return [uploadpath, files]
|
| |
|
| |
- def create_local_repo(self, rinfo, arch, pkglist, groupdata, oldrepo):
|
| |
+ def create_local_repo(self, rinfo, arch, pkglist, groupdata, oldrepo, oldpkgs=None):
|
| |
koji.ensuredir(self.outdir)
|
| |
if self.options.use_createrepo_c:
|
| |
cmd = ['/usr/bin/createrepo_c']
|
| |
@@ -4875,6 +4882,11 @@
|
| |
cmd.append('--update')
|
| |
if self.options.createrepo_skip_stat:
|
| |
cmd.append('--skip-stat')
|
| |
+ if oldpkgs is not None:
|
| |
+ # generate delta-rpms
|
| |
+ cmd.append('--deltas')
|
| |
+ for op_dir in oldpkgs:
|
| |
+ cmd.extend(['--oldpackagedirs', op_dir])
|
| |
# note: we can't easily use a cachedir because we do not have write
|
| |
# permission. The good news is that with --update we won't need to
|
| |
# be scanning many rpms.
|
| |
@@ -4920,6 +4932,423 @@
|
| |
raise koji.GenericError('failed to merge repos: %s' \
|
| |
% parseStatus(status, ' '.join(cmd)))
|
| |
|
| |
+
|
| |
class NewDistRepoTask(BaseTaskHandler):
    """Top-level dist-repo task: fan out per-arch createdistrepo subtasks.

    One 'createdistrepo' subtask is spawned per canonical arch.  When
    multilib is requested, the 32-bit arches run first and their output is
    moved into its final location so the 64-bit subtasks can pull multilib
    packages from it.

    Returns a status string; per-arch results are reported to the hub via
    host.distRepoMove() and host.repoDone().
    """

    Methods = ['distRepo']
    _taskWeight = 0.1

    def handler(self, tag, repo_id, keys, task_opts):
        tinfo = self.session.getTag(tag, strict=True, event=task_opts['event'])
        # fall back to the arches configured on the tag when none were given
        if len(task_opts['arch']) == 0:
            arches = tinfo['arches'] or ''
            task_opts['arch'] = arches.split()
        if len(task_opts['arch']) == 0:
            raise koji.GenericError('No arches specified nor for the tag!')
        subtasks = {}
        # weed out subarchitectures
        canonArches = set()
        for arch in task_opts['arch']:
            canonArches.add(koji.canonArch(arch))
        # arch32s: canonical arches that are NOT 64-bit multilib targets
        arch32s = set()
        for arch in canonArches:
            if not rpmUtils.arch.isMultiLibArch(arch):
                arch32s.add(arch)
        for arch in arch32s:
            # we do 32-bit multilib arches first so the 64-bit ones can
            # get a task ID and wait for them to complete
            arglist = [tag, repo_id, arch, keys, task_opts]
            subtasks[arch] = self.session.host.subtask(
                method='createdistrepo', arglist=arglist, label=arch,
                parent=self.id, arch='noarch')
        if len(subtasks) > 0 and task_opts['multilib']:
            results = self.wait(subtasks.values(), all=True, failany=True)
            for arch in arch32s:
                # move the 32-bit task output to the final resting place
                # so the 64-bit arches can use it for multilib
                upload, files, sigmap = results[subtasks[arch]]
                self.session.host.distRepoMove(
                    repo_id, upload, files, arch, sigmap)
        for arch in canonArches:
            # spawn the remaining (64-bit / non-multilib) arches
            if arch not in arch32s:
                arglist = [tag, repo_id, arch, keys, task_opts]
                subtasks[arch] = self.session.host.subtask(
                    method='createdistrepo', arglist=arglist, label=arch,
                    parent=self.id, arch='noarch')
        # wait for all subtasks to finish (already-finished ones return
        # their cached results)
        data = {}
        results = self.wait(subtasks.values(), all=True, failany=True)
        for (arch, task_id) in subtasks.iteritems():
            data[arch] = results[task_id]
            self.logger.debug("DEBUG: %r : %r " % (arch, data[arch]))
            if task_opts['multilib'] and arch in arch32s:
                # 32-bit results were already moved above
                continue
            upload, files, sigmap = results[subtasks[arch]]
            self.session.host.distRepoMove(
                repo_id, upload, files, arch, sigmap)
        self.session.host.repoDone(repo_id, data, expire=False)
        return 'Dist repository #%s successfully generated' % repo_id
|
| |
+
|
| |
+
|
| |
class createDistRepoTask(CreaterepoTask):
    """Per-arch dist-repo generation subtask ('createdistrepo').

    Builds the package list and repodata for a single canonical arch,
    optionally pulling in multilib packages from the companion 32-bit
    arch's (already completed) task output.
    """

    Methods = ['createdistrepo']
    _taskWeight = 1.5

    # 64-bit arch -> the 32-bit arch whose packages are used for multilib
    archmap = {'s390x': 's390', 'ppc64': 'ppc', 'x86_64': 'i686'}
    # canonical arch -> rpm arches whose packages are compatible with it
    # (each tuple is ordered roughly best-first; 'src' has no noarch entry)
    compat = {"i386": ("athlon", "i686", "i586", "i486", "i386", "noarch"),
        "x86_64": ("amd64", "ia32e", "x86_64", "noarch"),
        "ia64": ("ia64", "noarch"),
        "ppc": ("ppc", "noarch"),
        "ppc64": ("ppc64p7", "ppc64pseries", "ppc64iseries", "ppc64", "noarch"),
        "ppc64le": ("ppc64le", "noarch"),
        "s390": ("s390", "noarch"),
        "s390x": ("s390x", "noarch"),
        "sparc": ("sparcv9v", "sparcv9", "sparcv8", "sparc", "noarch"),
        "sparc64": ("sparc64v", "sparc64", "noarch"),
        "alpha": ("alphaev6", "alphaev56", "alphaev5", "alpha", "noarch"),
        "arm": ("arm", "armv4l", "armv4tl", "armv5tel", "armv5tejl", "armv6l", "armv7l", "noarch"),
        "armhfp": ("armv7hl", "armv7hnl", "noarch"),
        "aarch64": ("aarch64", "noarch"),
        "src": ("src",)
        }

    # canonical arch -> its multilib counterpart (used to extend the yum
    # archlist during multilib depsolving)
    biarch = {"ppc": "ppc64", "x86_64": "i386", "sparc":
        "sparc64", "s390x": "s390", "ppc64": "ppc"}
|
| |
+
|
| |
+ def handler(self, tag, repo_id, arch, keys, opts):
|
| |
+ #arch is the arch of the repo, not the task
|
| |
+ self.rinfo = self.session.repoInfo(repo_id, strict=True)
|
| |
+ if self.rinfo['state'] != koji.REPO_INIT:
|
| |
+ raise koji.GenericError("Repo %(id)s not in INIT state (got %(state)s)" % self.rinfo)
|
| |
+ self.repo_id = self.rinfo['id']
|
| |
+ self.pathinfo = koji.PathInfo(self.options.topdir)
|
| |
+ groupdata = os.path.join(
|
| |
+ self.pathinfo.distrepo(repo_id, self.rinfo['tag_name']),
|
| |
+ 'groups', 'comps.xml')
|
| |
+ #set up our output dir
|
| |
+ self.repodir = '%s/repo' % self.workdir
|
| |
+ koji.ensuredir(self.repodir)
|
| |
+ self.outdir = self.repodir # workaround create_local_repo use
|
| |
+ self.datadir = '%s/repodata' % self.repodir
|
| |
+ self.sigmap = {}
|
| |
+ oldpkgs = []
|
| |
+ if opts.get('delta'):
|
| |
+ # should be a list of repo ids to delta against
|
| |
+ for repo_id in opts['delta']:
|
| |
+ oldrepo = self.session.repoInfo(repo_id, strict=True)
|
| |
+ if not oldrepo['dist']:
|
| |
+ raise koji.GenericError("Base repo for deltas must also "
|
| |
+ "be a dist repo")
|
| |
+ # regular repos don't actually have rpms, just pkglist
|
| |
+ path = koji.pathinfo.distrepo(repo_id, oldrepo['tag_name'])
|
| |
+ if not os.path.exists(path):
|
| |
+ raise koji.GenericError('Base drpm repo missing: %s' % path)
|
| |
+ oldpkgs.append(path)
|
| |
+ self.uploadpath = self.getUploadDir()
|
| |
+ self.pkglist = self.make_pkglist(tag, arch, keys, opts)
|
| |
+ if opts['multilib'] and rpmUtils.arch.isMultiLibArch(arch):
|
| |
+ self.do_multilib(arch, self.archmap[arch], opts['multilib'])
|
| |
+ self.write_kojipkgs()
|
| |
+ self.logger.debug('package list is %s' % self.pkglist)
|
| |
+ self.session.uploadWrapper(self.pkglist, self.uploadpath,
|
| |
+ os.path.basename(self.pkglist))
|
| |
+ if os.path.getsize(self.pkglist) == 0:
|
| |
+ self.pkglist = None
|
| |
+ self.create_local_repo(self.rinfo, arch, self.pkglist, groupdata, None, oldpkgs=oldpkgs)
|
| |
+ if self.pkglist is None:
|
| |
+ fo = file(os.path.join(self.datadir, "EMPTY_REPO"), 'w')
|
| |
+ fo.write("This repo is empty because its tag has no content for this arch\n")
|
| |
+ fo.close()
|
| |
+ files = ['pkglist', 'kojipkgs']
|
| |
+ for f in os.listdir(self.datadir):
|
| |
+ files.append(f)
|
| |
+ self.session.uploadWrapper('%s/%s' % (self.datadir, f),
|
| |
+ self.uploadpath, f)
|
| |
+ if opts['delta']:
|
| |
+ ddir = os.path.join(self.repodir, 'drpms')
|
| |
+ for f in os.listdir(ddir):
|
| |
+ files.append(f)
|
| |
+ self.session.uploadWrapper('%s/%s' % (ddir, f),
|
| |
+ self.uploadpath, f)
|
| |
+ return [self.uploadpath, files, self.sigmap.items()]
|
| |
+
|
| |
    def do_multilib(self, arch, ml_arch, conf):
        """Add multilib (32-bit) packages and their deps to the pkglist.

        :param arch: the 64-bit repo arch being built
        :param ml_arch: the companion 32-bit arch (from self.archmap)
        :param conf: multilib config filename, relative to the work dir

        Side effects: appends entries to the file at self.pkglist, creates
        symlinks under self.repodir, and records key ids in self.sigmap.
        Raises koji.GenericError if depsolving fails or any multilib file
        is missing from the filesystem.

        NOTE(review): relies on the ml_arch subtask output already being
        moved into the dist-repo tree (see NewDistRepoTask ordering).
        """
        self.repo_id = self.rinfo['id']
        pathinfo = koji.PathInfo(self.options.topdir)
        repodir = pathinfo.distrepo(self.rinfo['id'], self.rinfo['tag_name'])
        # location of the already-completed 32-bit arch repo
        mldir = os.path.join(repodir, koji.canonArch(ml_arch))
        ml_true = set()  # multilib packages we need to include before depsolve
        ml_conf = os.path.join(self.pathinfo.work(), conf)

        # step 1: figure out which packages are multilib (should already exist)
        mlm = multilib.DevelMultilibMethod(ml_conf)
        fs_missing = set()
        with open(self.pkglist) as pkglist:
            for pkg in pkglist:
                ppath = os.path.join(self.repodir, pkg.strip())
                po = yum.packages.YumLocalPackage(filename=ppath)
                if mlm.select(po) and arch in self.archmap:
                    # we need a multilib package to be included
                    # we assume the same signature level is available
                    # XXX: what is a subarchitecture is the right answer?
                    pl_path = pkg.replace(arch, self.archmap[arch]).strip()
                    # assume this exists in the task results for the ml arch
                    real_path = os.path.join(mldir, pl_path)
                    if not os.path.exists(real_path):
                        self.logger.error('%s (multilib) is not on the filesystem' % real_path)
                        fs_missing.add(real_path)
                        # we defer failure so can report all the missing deps
                        continue
                    ml_true.add(real_path)

        # step 2: set up architectures for yum configuration
        self.logger.info("Resolving multilib for %s using method devel" % arch)
        yumbase = yum.YumBase()
        yumbase.verbose_logger.setLevel(logging.ERROR)
        yumdir = os.path.join(self.workdir, 'yum')
        # TODO: unwind this arch mess
        archlist = (arch, 'noarch')
        transaction_arch = arch
        # extend the archlist with everything compatible with the 32-bit
        # counterpart so yum can see the multilib candidates
        archlist = archlist + self.compat[self.biarch[arch]]
        best_compat = self.compat[self.biarch[arch]][0]
        if rpmUtils.arch.archDifference(best_compat, arch) > 0:
            transaction_arch = best_compat
        if hasattr(rpmUtils.arch, 'ArchStorage'):
            yumbase.preconf.arch = transaction_arch
        else:
            # old yum: no preconf support, mutate the module-level arch
            rpmUtils.arch.canonArch = transaction_arch

        # throwaway yum configuration pointing at the 32-bit arch repo;
        # paths are relative to the installroot (yumdir)
        yconfig = """
[main]
debuglevel=2
pkgpolicy=newest
exactarch=1
gpgcheck=0
reposdir=/dev/null
cachedir=/yumcache
installroot=%s
logfile=/yum.log

[koji-%s]
name=koji multilib task
baseurl=file://%s
enabled=1

""" % (yumdir, self.id, mldir)
        os.makedirs(os.path.join(yumdir, "yumcache"))
        os.makedirs(os.path.join(yumdir, 'var/lib/rpm'))

        # step 3: proceed with yum config and set up
        yconfig_path = os.path.join(yumdir, 'yum.conf-koji-%s' % arch)
        f = open(yconfig_path, 'w')
        f.write(yconfig)
        f.close()
        # upload the config for debugging purposes
        self.session.uploadWrapper(yconfig_path, self.uploadpath,
            os.path.basename(yconfig_path))
        yumbase.doConfigSetup(fn=yconfig_path)
        yumbase.conf.cache = 0
        yumbase.doRepoSetup()
        yumbase.doTsSetup()
        yumbase.doRpmDBSetup()
        # we trust Koji's files, so skip verifying sigs and digests
        yumbase.ts.pushVSFlags(
            (rpm._RPMVSF_NOSIGNATURES | rpm._RPMVSF_NODIGESTS))
        yumbase.doSackSetup(archlist=archlist, thisrepo='koji-%s' % arch)
        yumbase.doSackFilelistPopulate()
        # seed the transaction with the multilib packages chosen in step 1
        for pkg in ml_true:
            # TODO: store packages by first letter
            # ppath = os.path.join(pkgdir, pkg.name[0].lower(), pname)
            po = yum.packages.YumLocalPackage(filename=pkg)
            yumbase.tsInfo.addInstall(po)

        # step 4: execute yum transaction to get dependencies
        self.logger.info("Resolving depenencies for arch %s" % arch)
        rc, errors = yumbase.resolveDeps()
        ml_needed = {}
        # everything in the transaction (seeds + pulled-in deps) must exist
        # in the 32-bit repo tree
        for tspkg in yumbase.tsInfo.getMembers():
            bnp = os.path.basename(tspkg.po.localPkg())
            dep_path = os.path.join(mldir, bnp[0].lower(), bnp)
            ml_needed[dep_path] = tspkg
            self.logger.debug("added %s" % dep_path)
            if not os.path.exists(dep_path):
                self.logger.error('%s (multilib dep) not on filesystem' % dep_path)
                fs_missing.add(dep_path)
        self.logger.info('yum return code: %s' % rc)
        if not rc:
            self.logger.error('yum depsolve was unsuccessful')
            raise koji.GenericError(errors)
        if len(fs_missing) > 0:
            # report every missing file in one log, then fail
            missing_log = os.path.join(self.workdir, 'missing_multilib.log')
            outfile = open(missing_log, 'w')
            outfile.write('The following multilib files were missing:\n')
            for ml_path in fs_missing:
                outfile.write(ml_path)
                outfile.write('\n')
            outfile.close()
            self.session.uploadWrapper(missing_log, self.uploadpath)
            raise koji.GenericError('multilib packages missing. '
                    'See missing_multilib.log')

        # get rpm ids for ml pkgs (written by the 32-bit arch's subtask)
        kpkgfile = os.path.join(mldir, 'kojipkgs')
        kojipkgs = json.load(open(kpkgfile, 'r'))

        # step 5: add dependencies to our package list
        pkgwriter = open(self.pkglist, 'a')
        for dep_path in ml_needed:
            tspkg = ml_needed[dep_path]
            bnp = os.path.basename(dep_path)
            bnplet = bnp[0].lower()
            koji.ensuredir(os.path.join(self.repodir, bnplet))
            dst = os.path.join(self.repodir, bnplet, bnp)
            if os.path.exists(dst):
                # we expect duplication with noarch, but not other arches
                if tspkg.arch != 'noarch':
                    self.logger.warning("Path exists: %r", dst)
                continue
            pkgwriter.write(bnplet + '/' + bnp + '\n')
            self.logger.debug("os.symlink(%r, %r)", dep_path, dst)
            os.symlink(dep_path, dst)
            rpminfo = kojipkgs[bnp]
            self.sigmap[rpminfo['id']] = rpminfo['sigkey']
|
| |
+
|
| |
+
|
| |
+ def pick_key(self, keys, avail_keys):
|
| |
+ best = None
|
| |
+ best_idx = None
|
| |
+ for sigkey in avail_keys:
|
| |
+ if sigkey not in keys:
|
| |
+ # skip, not a key we are looking for
|
| |
+ continue
|
| |
+ idx = keys.index(sigkey)
|
| |
+ # lower idx (earlier in list) is more preferrable
|
| |
+ if best is None or best_idx > idx:
|
| |
+ best = sigkey
|
| |
+ best_idx = idx
|
| |
+ return best
|
| |
+
|
| |
+
|
| |
+ def make_pkglist(self, tag_id, arch, keys, opts):
|
| |
+ # get the rpm data
|
| |
+ rpms = []
|
| |
+ builddirs = {}
|
| |
+ for a in self.compat[arch] + ('noarch',):
|
| |
+ rpm_iter, builds = self.session.listTaggedRPMS(tag_id,
|
| |
+ event=opts['event'], arch=a, latest=opts['latest'],
|
| |
+ inherit=opts['inherit'], rpmsigs=True)
|
| |
+ for build in builds:
|
| |
+ builddirs[build['id']] = self.pathinfo.build(build)
|
| |
+ rpms += list(rpm_iter)
|
| |
+
|
| |
+ # index by id and key
|
| |
+ preferred = {}
|
| |
+ rpm_idx = {}
|
| |
+ for rpminfo in rpms:
|
| |
+ sigidx = rpm_idx.setdefault(rpminfo['id'], {})
|
| |
+ sigidx[rpminfo['sigkey']] = rpminfo
|
| |
+
|
| |
+ # select our rpms
|
| |
+ selected = {}
|
| |
+ for rpm_id in rpm_idx:
|
| |
+ avail_keys = rpm_idx[rpm_id].keys()
|
| |
+ best_key = self.pick_key(keys, avail_keys)
|
| |
+ if best_key is None:
|
| |
+ # we lack a matching key for this rpm
|
| |
+ fallback = avail_keys[0]
|
| |
+ rpminfo = rpm_idx[rpm_id][fallback].copy()
|
| |
+ rpminfo['sigkey'] = None
|
| |
+ selected[rpm_id] = rpminfo
|
| |
+ else:
|
| |
+ selected[rpm_id] = rpm_idx[rpm_id][best_key]
|
| |
+
|
| |
+ #generate pkglist files
|
| |
+ pkgfile = os.path.join(self.repodir, 'pkglist')
|
| |
+ pkglist = file(pkgfile, 'w')
|
| |
+ fs_missing = []
|
| |
+ sig_missing = []
|
| |
+ kojipkgs = {}
|
| |
+ for rpm_id in selected:
|
| |
+ rpminfo = selected[rpm_id]
|
| |
+ if rpminfo['sigkey'] is None:
|
| |
+ sig_missing.append(rpm_id)
|
| |
+ if opts['skip_missing_signatures']:
|
| |
+ continue
|
| |
+ # use the primary copy, if allowed (checked below)
|
| |
+ pkgpath = '%s/%s' % (builddirs[rpminfo['build_id']],
|
| |
+ self.pathinfo.rpm(rpminfo))
|
| |
+ else:
|
| |
+ # use the signed copy
|
| |
+ pkgpath = '%s/%s' % (builddirs[rpminfo['build_id']],
|
| |
+ self.pathinfo.signed(rpminfo, rpminfo['sigkey']))
|
| |
+ if not os.path.exists(pkgpath):
|
| |
+ fs_missing.append(pkgpath)
|
| |
+ # we'll raise an error below
|
| |
+ else:
|
| |
+ bnp = os.path.basename(pkgpath)
|
| |
+ bnplet = bnp[0].lower()
|
| |
+ pkglist.write(bnplet + '/' + bnp + '\n')
|
| |
+ koji.ensuredir(os.path.join(self.repodir, bnplet))
|
| |
+ self.sigmap[rpminfo['id']] = rpminfo['sigkey']
|
| |
+ dst = os.path.join(self.repodir, bnplet, bnp)
|
| |
+ self.logger.debug("os.symlink(%r, %r(", pkgpath, dst)
|
| |
+ os.symlink(pkgpath, dst)
|
| |
+ kojipkgs[bnp] = rpminfo
|
| |
+ pkglist.close()
|
| |
+ self.kojipkgs = kojipkgs
|
| |
+
|
| |
+ # report problems
|
| |
+ if len(fs_missing) > 0:
|
| |
+ missing_log = os.path.join(self.workdir, 'missing_files.log')
|
| |
+ outfile = open(missing_log, 'w')
|
| |
+ outfile.write('Some rpm files were missing.\n'
|
| |
+ 'Most likely, you want to create these signed copies.\n\n'
|
| |
+ 'Missing files:\n')
|
| |
+ for pkgpath in sorted(fs_missing):
|
| |
+ outfile.write(pkgpath)
|
| |
+ outfile.write('\n')
|
| |
+ outfile.close()
|
| |
+ self.session.uploadWrapper(missing_log, self.uploadpath)
|
| |
+ raise koji.GenericError('Packages missing from the filesystem. '
|
| |
+ 'See missing_files.log.')
|
| |
+ if sig_missing:
|
| |
+ # log missing signatures and possibly error
|
| |
+ missing_log = os.path.join(self.workdir, 'missing_signatures.log')
|
| |
+ outfile = open(missing_log, 'w')
|
| |
+ outfile.write('Some rpms were missing requested signatures.\n')
|
| |
+ if opts['skip_missing_signatures']:
|
| |
+ outfile.write('The skip_missing_signatures option was specified, so '
|
| |
+ 'these files were excluded.\n')
|
| |
+ outfile.write('Acceptable keys: %r\n\n' % keys)
|
| |
+ outfile.write('# RPM name: available keys\n')
|
| |
+ fmt = '%(name)s-%(version)s-%(release)s.%(arch)s'
|
| |
+ filenames = [[fmt % selected[r], r] for r in sig_missing]
|
| |
+ for fname, rpm_id in sorted(filenames):
|
| |
+ avail = rpm_idx.get(rpm_id, {}).keys()
|
| |
+ outfile.write('%s: %r\n' % (fname, avail))
|
| |
+ outfile.close()
|
| |
+ self.session.uploadWrapper(missing_log, self.uploadpath)
|
| |
+ if (not opts['skip_missing_signatures']
|
| |
+ and not opts['allow_missing_signatures']):
|
| |
+ raise koji.GenericError('Unsigned packages found. See '
|
| |
+ 'missing_signatures.log')
|
| |
+ return pkgfile
|
| |
+
|
| |
+
|
| |
+ def write_kojipkgs(self):
|
| |
+ filename = os.path.join(self.repodir, 'kojipkgs')
|
| |
+ datafile = file(filename, 'w')
|
| |
+ try:
|
| |
+ json.dump(self.kojipkgs, datafile, indent=4)
|
| |
+ finally:
|
| |
+ datafile.close()
|
| |
+ # and upload too
|
| |
+ self.session.uploadWrapper(filename, self.uploadpath, 'kojipkgs')
|
| |
+
|
| |
+
|
| |
+
|
| |
class WaitrepoTask(BaseTaskHandler):
|
| |
|
| |
Methods = ['waitrepo']
|
| |
Based on PR #54
Rebased, with several fixes and adjustments