From 75a3cab1e891cd9e09d5de70a6986b16fbdab73e Mon Sep 17 00:00:00 2001 From: Tomas Kopecek Date: Jan 08 2024 13:02:20 +0000 Subject: PR#3913: draft builds Merges #3913 https://pagure.io/koji/pull-request/3913 Fixes #3918 https://pagure.io/koji/issue/3918 --- diff --git a/builder/kojid b/builder/kojid index b4e72b5..fe35e47 100755 --- a/builder/kojid +++ b/builder/kojid @@ -346,6 +346,10 @@ class BuildRoot(object): with koji._open_text_file(configfile, 'wt') as fo: fo.write(output) + def get_repo_dir(self): + pathinfo = koji.PathInfo(topdir='') + return pathinfo.repo(self.repoid, self.tag_name) + def _repositoryEntries(self, pi, plugin=False): entries = [] if plugin: @@ -688,6 +692,7 @@ class BuildRoot(object): """Return a list of packages from the buildroot Each member of the list is a dictionary containing the following fields: + - id, optional for internal rpm available in rpmlist.jsonl - name - version - release @@ -696,6 +701,8 @@ class BuildRoot(object): - payloadhash - size - buildtime + - external_repo, optional for external rpm + - location, optional for external rpm """ fields = ('name', 'version', @@ -734,6 +741,7 @@ class BuildRoot(object): finally: rpm.delMacro("_dbpath") self.markExternalRPMs(ret) + self.mapInternalRPMs(ret) return ret def getMavenPackageList(self, repodir): @@ -825,19 +833,18 @@ class BuildRoot(object): # substitute $arch in the url with the arch of the repo we're generating ext_url = erepo['url'].replace('$arch', self.br_arch) erepo_idx[ext_url] = erepo - pathinfo = koji.PathInfo(topdir='') - repodir = pathinfo.repo(self.repo_info['id'], self.repo_info['tag_name']) opts = dict([(k, getattr(self.options, k)) for k in ('topurl', 'topdir')]) opts['tempdir'] = self.options.workdir - repo_url = os.path.join(repodir, self.br_arch) + repo_url = os.path.join(self.get_repo_dir(), self.br_arch) # repo_url can start with '/', don't use os.path.join if self.options.topurl: repo_url = '%s/%s' % (self.options.topurl, repo_url) elif 
self.options.topdir: repo_url = '%s/%s' % (self.options.topdir, repo_url) - logging.error(repo_url) + self.logger.info("repo url of buildroot: %s is %s", self.name, repo_url) + tmpdir = os.path.join(self.tmpdir(), 'librepo-markExternalRPMs') koji.ensuredir(tmpdir) h = librepo.Handle() @@ -852,7 +859,7 @@ class BuildRoot(object): pkgorigins = r.getinfo(librepo.LRR_YUM_REPOMD)['origin']['location_href'] koji.util.rmtree(tmpdir) - relpath = os.path.join(repodir, self.br_arch, pkgorigins) + relpath = os.path.join(self.get_repo_dir(), self.br_arch, pkgorigins) with koji.openRemoteFile(relpath, **opts) as fo: # at this point we know there were external repos at the create event, # so there should be an origins file. @@ -886,6 +893,66 @@ class BuildRoot(object): rpm_info['external_repo'] = erepo rpm_info['location'] = erepo['external_repo_id'] + def mapInternalRPMs(self, rpmlist): + """ + Map each rpm item of rpmlist to a specific koji rpm entry based on repo contents + + The rpmList should be a list of dicts containing rpm header values. These entries will be + modified in place to include an id field when mapped. + + This mapping relies on the rpmlist.jsonl file for the repo. If this file is missing, the + code will fall back to querying the hub. + + This function will raise an error if there is a sigmd5 mismatch for a given rpm. + + :param list rpmlist: rpm list fetched from local RPMDB. 
+ :return: None + """ + + opts = dict([(k, getattr(self.options, k)) for k in ('topurl', 'topdir')]) + rpmlist_path = os.path.join(self.get_repo_dir(), self.br_arch, 'rpmlist.jsonl') + compat_mode = False + try: + with koji.openRemoteFile(rpmlist_path, **opts) as fo: + repo_rpms = [json.loads(line) for line in fo] + except requests.exceptions.HTTPError as e: + if e.response.status_code == 404: + self.logger.warning("Missing repo content file: %s", rpmlist_path) + # TODO: remove this workaround once we can assume that repos contain this file + repo_rpms = self.repo_draft_rpms() + compat_mode = True + else: + raise + fmt = "%(name)s-%(version)s-%(release)s.%(arch)s" + repo_rpms = {fmt % r: r for r in repo_rpms} + for rpm_info in rpmlist: + if 'external_repo' in rpm_info: + continue + nvra = fmt % rpm_info + data = repo_rpms.get(nvra) + if not data: + # happens a lot in compat mode because we only query for drafts + if not compat_mode: + self.logger.warning("%s not found in rpmlist.jsonl", nvra) + continue + # check payloadhash in case they are different + elif data['payloadhash'] != rpm_info['payloadhash']: + raise koji.BuildrootError( + "RPM: %s: payloadhash: %s mismatch expected %s in rpmlist.jsonl" + % (nvra, rpm_info['payloadhash'], data['payloadhash']) + ) + else: + # set rpm id + rpm_info['id'] = data['id'] + + def repo_draft_rpms(self): + drafts, draftbuilds = self.session.listTaggedRPMS( + tag=self.repo_info['tag_id'], + event=self.repo_info['create_event'], + latest=True, + draft=True) + return drafts + def path_without_to_within(self, path): """ Convert an absolute path from without the BuildRoot to one within. 
@@ -1102,6 +1169,9 @@ class BuildTask(BaseTaskHandler): % (data['name'], target_info['dest_tag_name'])) # TODO - more pre tests archlist = self.getArchList(build_tag, h, extra=extra_arches) + # pass draft option in + if opts.get('draft'): + data['draft'] = opts.get('draft') # let the system know about the build we're attempting if not self.opts.get('scratch'): # scratch builds do not get imported @@ -2176,6 +2246,9 @@ class WrapperRPMTask(BaseBuildTask): data['extra'] = {'source': {'original_url': source['url']}} if opts.get('custom_user_metadata'): data['extra']['custom_user_metadata'] = opts['custom_user_metadata'] + # pass draft option in + if opts.get('draft'): + data['draft'] = opts.get('draft') self.logger.info("Reading package config for %(name)s" % data) pkg_cfg = self.session.getPackageConfig(build_target['dest_tag'], data['name']) if not opts.get('skip_tag'): @@ -5249,6 +5322,7 @@ Subject: %(nvr)s %(result)s %(operation)s by %(user_name)s\r To: %(to_addrs)s\r X-Koji-Package: %(pkg_name)s\r X-Koji-NVR: %(nvr)s\r +X-Koji-Draft: %(draft)s\r X-Koji-User: %(user_name)s\r X-Koji-Status: %(status)s\r %(tag_headers)s\r @@ -5278,6 +5352,7 @@ Status: %(status)s\r user = self.session.getUser(user_info) pkg_name = build['package_name'] nvr = koji.buildLabel(build) + draft = build.get('draft', False) user_name = user['name'] from_addr = self.options.from_addr @@ -5349,6 +5424,7 @@ X-Koji-Tag: %(dest_tag)s\r X-Koji-Package: %(build_pkg_name)s\r X-Koji-Builder: %(build_owner)s\r X-Koji-Status: %(status)s\r +X-Koji-Draft: %(draft)s\r \r Package: %(build_nvr)s\r Tag: %(dest_tag)s\r @@ -5448,6 +5524,7 @@ Build Info: %(weburl)s/buildinfo?buildID=%(build_id)i\r build_nvr = koji.buildLabel(build) build_id = build['id'] build_owner = build['owner_name'] + draft = build.get('draft', False) # target comes from session.py:_get_build_target() dest_tag = None if target is not None: @@ -6335,10 +6412,16 @@ enabled=1 for a in self.compat[arch]: # note: self.compat includes noarch for 
non-src already rpm_iter, builds = self.session.listTaggedRPMS(tag_id, - event=opts['event'], arch=a, + event=opts['event'], + arch=a, latest=opts['latest'], - inherit=opts['inherit'], rpmsigs=True) + inherit=opts['inherit'], + rpmsigs=True) + for build in builds: + # disable draft for distRepo so far + if build.get('draft'): + raise koji.BuildError("Draft build: %s is not allowed" % build['nvr']) builddirs[build['id']] = koji.pathinfo.build(build) rpms += list(rpm_iter) diff --git a/cli/koji_cli/commands.py b/cli/koji_cli/commands.py index 9c52358..3cb4772 100644 --- a/cli/koji_cli/commands.py +++ b/cli/koji_cli/commands.py @@ -580,6 +580,8 @@ def handle_build(options, session, args): parser.add_option("--custom-user-metadata", type="str", help="Provide a JSON string of custom metadata to be deserialized and " "stored under the build's extra.custom_user_metadata field") + parser.add_option("--draft", action="store_true", + help="Build draft build instead") (build_opts, args) = parser.parse_args(args) if len(args) != 2: parser.error("Exactly two arguments (a build target and a SCM URL or srpm file) are " @@ -588,6 +590,8 @@ def handle_build(options, session, args): parser.error("--no-/rebuild-srpm is only allowed for --scratch builds") if build_opts.arch_override and not build_opts.scratch: parser.error("--arch_override is only allowed for --scratch builds") + if build_opts.scratch and build_opts.draft: + parser.error("--scratch and --draft cannot be both specfied") custom_user_metadata = {} if build_opts.custom_user_metadata: try: @@ -618,7 +622,7 @@ def handle_build(options, session, args): if build_opts.arch_override: opts['arch_override'] = koji.parse_arches(build_opts.arch_override) for key in ('skip_tag', 'scratch', 'repo_id', 'fail_fast', 'wait_repo', 'wait_builds', - 'rebuild_srpm'): + 'rebuild_srpm', 'draft'): val = getattr(build_opts, key) if val is not None: opts[key] = val @@ -830,6 +834,8 @@ def handle_wrapper_rpm(options, session, args): 
parser.add_option("--nowait", action="store_false", dest="wait", help="Don't wait on build") parser.add_option("--background", action="store_true", help="Run the build at a lower priority") + parser.add_option("--create-draft", action="store_true", + help="Create a new draft build instead") (build_opts, args) = parser.parse_args(args) if build_opts.inis: @@ -839,6 +845,12 @@ def handle_wrapper_rpm(options, session, args): if len(args) < 3: parser.error("You must provide a build target, a build ID or NVR, " "and a SCM URL to a specfile fragment") + if build_opts.create_draft: + print("Will create a draft build instead") + build_opts.create_build = True + if build_opts.scratch: + # TODO: --scratch and --create-build conflict too + parser.error("--scratch and --create-draft cannot be both specfied") activate_session(session, options) target = args[0] @@ -874,6 +886,8 @@ def handle_wrapper_rpm(options, session, args): opts['skip_tag'] = True if build_opts.scratch: opts['scratch'] = True + if build_opts.create_draft: + opts['draft'] = True task_id = session.wrapperRPM(build_id, url, target, priority, opts=opts) print("Created task: %d" % task_id) print("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)) @@ -2727,11 +2741,15 @@ def anon_handle_list_tagged(goptions, session, args): parser.add_option("--ts", type='int', metavar="TIMESTAMP", help="query at last event before timestamp") parser.add_option("--repo", type='int', metavar="REPO#", help="query at event for a repo") + parser.add_option("--draft-only", action="store_true", help="Only list draft builds/rpms") + parser.add_option("--no-draft", action="store_true", help="Only list regular builds/rpms") (options, args) = parser.parse_args(args) if len(args) == 0: parser.error("A tag name must be specified") elif len(args) > 2: parser.error("Only one package name may be specified") + if options.no_draft and options.draft_only: + parser.error("--draft-only conflicts with --no-draft") ensure_connection(session, 
goptions) pathinfo = koji.PathInfo() package = None @@ -2753,6 +2771,10 @@ def anon_handle_list_tagged(goptions, session, args): options.rpms = True if options.type: opts['type'] = options.type + elif options.no_draft: + opts['draft'] = False + elif options.draft_only: + opts['draft'] = True event = koji.util.eventFromOpts(session, options) event_id = None if event: @@ -2798,7 +2820,9 @@ def anon_handle_list_tagged(goptions, session, args): fmt = "%(path)s" data = [x for x in data if 'path' in x] else: - fmt = "%(name)s-%(version)s-%(release)s.%(arch)s" + fmt = "%(name)s-%(version)s-%(release)s.%(arch)s%(draft_suffix)s" + for x in data: + x['draft_suffix'] = (' (,draft_%s)' % x['build_id']) if x.get('draft') else '' if options.sigs: fmt = "%(sigkey)s " + fmt else: @@ -2861,10 +2885,13 @@ def anon_handle_list_buildroot(goptions, session, args): fmt = "%(nvr)s.%(arch)s" order = sorted([(fmt % x, x) for x in list_rpms]) for nvra, rinfo in order: - if options.verbose and rinfo.get('is_update'): - print("%s [update]" % nvra) - else: - print(nvra) + line = nvra + if options.verbose: + if rinfo.get('draft'): + line += " (,draft_%s)" % rinfo['build_id'] + if rinfo.get('is_update'): + line += " [update]" + print(line) list_archives = session.listArchives(**opts) if list_archives: @@ -3403,6 +3430,8 @@ def anon_handle_list_builds(goptions, session, args): parser.add_option("--source", help="Only builds where the source field matches (glob pattern)") parser.add_option("--owner", help="List builds built by this owner") parser.add_option("--volume", help="List builds by volume ID") + parser.add_option("--draft-only", action="store_true", help="Only list draft builds") + parser.add_option("--no-draft", action="store_true", help="Only list regular builds") parser.add_option("-k", "--sort-key", action="append", metavar='FIELD', default=[], help="Sort the list by the named field. 
Allowed sort keys: " "build_id, owner_name, state") @@ -3419,6 +3448,12 @@ def anon_handle_list_builds(goptions, session, args): value = getattr(options, key) if value is not None: opts[key] = value + if options.no_draft and options.draft_only: + parser.error("--draft-only conflits with --no-draft") + elif options.no_draft: + opts['draft'] = False + elif options.draft_only: + opts['draft'] = True if options.cg: opts['cgID'] = options.cg if options.package: @@ -3516,10 +3551,11 @@ def anon_handle_list_builds(goptions, session, args): def anon_handle_rpminfo(goptions, session, args): "[info] Print basic information about an RPM" - usage = "usage: %prog rpminfo [options] [ ...]" + usage = "usage: %prog rpminfo [options] [ ...]" parser = OptionParser(usage=get_usage_str(usage)) parser.add_option("--buildroots", action="store_true", help="show buildroots the rpm was used in") + (options, args) = parser.parse_args(args) if len(args) < 1: parser.error("Please specify an RPM") @@ -3536,24 +3572,39 @@ def anon_handle_rpminfo(goptions, session, args): else: info['epoch'] = str(info['epoch']) + ":" if not info.get('external_repo_id', 0): - buildinfo = session.getBuild(info['build_id']) - buildinfo['name'] = buildinfo['package_name'] - buildinfo['arch'] = 'src' - if buildinfo['epoch'] is None: - buildinfo['epoch'] = "" + if info['arch'] == 'src': + srpminfo = info.copy() else: - buildinfo['epoch'] = str(buildinfo['epoch']) + ":" + srpminfo = None + srpms = session.listRPMs(buildID=info['build_id'], arches='src') + if srpms: + srpminfo = srpms[0] + if srpminfo['epoch'] is None: + srpminfo['epoch'] = "" + else: + srpminfo['epoch'] = str(srpminfo['epoch']) + ":" + buildinfo = session.getBuild(info['build_id']) print("RPM: %(epoch)s%(name)s-%(version)s-%(release)s.%(arch)s [%(id)d]" % info) + if info.get('draft'): + print("Draft: YES") if info.get('external_repo_id'): repo = session.getExternalRepo(info['external_repo_id']) print("External Repository: %(name)s [%(id)i]" % repo) 
print("External Repository url: %(url)s" % repo) else: + print("Build: %(nvr)s [%(id)d]" % buildinfo) print("RPM Path: %s" % os.path.join(koji.pathinfo.build(buildinfo), koji.pathinfo.rpm(info))) - print("SRPM: %(epoch)s%(name)s-%(version)s-%(release)s [%(id)d]" % buildinfo) - print("SRPM Path: %s" % - os.path.join(koji.pathinfo.build(buildinfo), koji.pathinfo.rpm(buildinfo))) + if srpminfo: + srpm_str = "%(epoch)s%(name)s-%(version)s-%(release)s [%(id)d]" % srpminfo + srpm_path = os.path.join( + koji.pathinfo.build(buildinfo), + koji.pathinfo.rpm(srpminfo) + ) + else: + srpm_path = srpm_str = "(none)" + print("SRPM: %s" % srpm_str) + print("SRPM Path: %s" % srpm_path) print("Built: %s" % time.strftime('%a, %d %b %Y %H:%M:%S %Z', time.localtime(info['buildtime']))) print("SIGMD5: %(payloadhash)s" % info) @@ -3563,6 +3614,7 @@ def anon_handle_rpminfo(goptions, session, args): headers=["license"]) if 'license' in headers: print("License: %(license)s" % headers) + # kept for backward compatibility print("Build ID: %(build_id)s" % info) if info['buildroot_id'] is None: print("No buildroot data available") @@ -3619,6 +3671,8 @@ def anon_handle_buildinfo(goptions, session, args): info['arch'] = 'src' info['state'] = koji.BUILD_STATES[info['state']] print("BUILD: %(name)s-%(version)s-%(release)s [%(id)d]" % info) + if info.get('draft'): + print("Draft: YES") print("State: %(state)s" % info) if info['state'] == 'BUILDING': print("Reserved by: %(cg_name)s" % info) @@ -3633,6 +3687,9 @@ def anon_handle_buildinfo(goptions, session, args): else: print("Task: none") print("Finished: %s" % koji.formatTimeLong(info['completion_ts'])) + if info.get('promotion_ts'): + print("Promoted by: %(promoter_name)s" % info) + print("Promoted at: %s" % koji.formatTimeLong(info['promotion_ts'])) maven_info = session.getMavenBuild(info['id']) if maven_info: print("Maven groupId: %s" % maven_info['group_id']) @@ -8032,3 +8089,29 @@ def handle_scheduler_logs(goptions, session, args): for log in 
logs: print(mask % log) + + +def handle_promote_build(goptions, session, args): + "[misc] Promote a draft build" + usage = "usage: %prog promote-build [options] " + parser = OptionParser(usage=get_usage_str(usage)) + parser.add_option('-f', '--force', action='store_true', default=False, + help='force operation') + (options, args) = parser.parse_args(args) + if len(args) != 1: + parser.error("Please specify a draft build") + draft_build = args[0] + try: + draft_build = int(draft_build) + except ValueError: + pass + activate_session(session, goptions) + if options.force and not session.hasPerm('admin'): + parser.error("--force requires admin privilege") + binfo = session.getBuild(draft_build) + if not binfo: + error("No such build: %s" % draft_build) + if not binfo.get('draft'): + error("Not a draft build: %s" % draft_build) + rinfo = session.promoteBuild(binfo['id'], force=options.force) + print("%s has been promoted to %s" % (binfo['nvr'], rinfo['nvr'])) diff --git a/koji/__init__.py b/koji/__init__.py index 11f6cba..334b403 100644 --- a/koji/__init__.py +++ b/koji/__init__.py @@ -274,6 +274,7 @@ TAG_UPDATE_TYPES = Enum(( 'VOLUME_CHANGE', 'IMPORT', 'MANUAL', + 'DRAFT_PROMOTION', )) # BEGIN kojikamid dup # @@ -295,6 +296,10 @@ PRIO_DEFAULT = 20 DEFAULT_REQUEST_TIMEOUT = 60 * 60 * 12 DEFAULT_AUTH_TIMEOUT = 60 +# draft release constants +DRAFT_RELEASE_DELIMITER = ',' +DRAFT_RELEASE_FORMAT = '{target_release}' + DRAFT_RELEASE_DELIMITER + 'draft_{build_id}' + # BEGIN kojikamid dup # # Exceptions @@ -2023,6 +2028,7 @@ def downloadFile(url, path=None, fo=None): fo.write(chunk) finally: resp.close() + resp.raise_for_status() if resp.headers.get('Content-Length') and fo.tell() != int(resp.headers['Content-Length']): raise GenericError("Downloaded file %s doesn't match expected size (%s vs %s)" % (url, fo.tell(), resp.headers['Content-Length'])) @@ -3924,6 +3930,35 @@ def fixEncodingRecurse(value, fallback='iso8859-15', remove_nonprintable=False): return walker.walk() +def 
gen_draft_release(target_release, build_id): + """Generate draft_release based on input build information + + Currently, it's generated as {target_release},draft_{build_id} + + :param str target_release: target "release", which is the release part of rpms' nvra, + and will be the release of the build the draft build is going to be + promoted to. + :param int build_id: the build "id" part in draft_release (it's unchanged so it can be used to + keep the uniqueness of build NVR) + :return: draft release + :rtype: str + """ + return DRAFT_RELEASE_FORMAT.format(**locals()) + + +def parse_target_release(draft_release): + """Generate target_release from a draft release reversely + + :param str draft_release: release of a draft build + :rtype: str + :raises GenericError: if cannot get a valid target_release + """ + parts = draft_release.split(DRAFT_RELEASE_DELIMITER, 1) + if len(parts) != 2 or not parts[-1].startswith('draft_'): + raise GenericError("draft release: %s is not in valid format" % draft_release) + return parts[0] + + def add_file_logger(logger, fn): if not os.path.exists(fn): try: diff --git a/koji/plugin.py b/koji/plugin.py index 6306e97..89c0863 100644 --- a/koji/plugin.py +++ b/koji/plugin.py @@ -60,6 +60,8 @@ callbacks = { 'postRepoInit': [], 'preRepoDone': [], 'postRepoDone': [], + 'preBuildPromote': [], + 'postBuildPromote': [], 'preCommit': [], 'postCommit': [], # builder diff --git a/kojihub/kojihub.py b/kojihub/kojihub.py index 79ae413..4ea9163 100644 --- a/kojihub/kojihub.py +++ b/kojihub/kojihub.py @@ -1394,7 +1394,7 @@ def list_tags(build=None, package=None, perms=True, queryOpts=None, pattern=None def readTaggedBuilds(tag, event=None, inherit=False, latest=False, package=None, owner=None, - type=None, extra=False): + type=None, extra=False, draft=None): """Returns a list of builds for specified tag :param int tag: tag ID @@ -1406,6 +1406,10 @@ def readTaggedBuilds(tag, event=None, inherit=False, latest=False, package=None, :param str type: 
restrict the list to builds of the given type. Currently the supported types are 'maven', 'win', 'image', or any custom content generator btypes. :param bool extra: Set to "True" to get the build extra info + :param bool draft: bool or None option that indicates the filter based on draft field + - None: no filter (both draft and regular builds) + - True: draft only + - False: regular only :returns [dict]: list of buildinfo dicts """ # build - id pkg_id version release epoch @@ -1430,9 +1434,12 @@ def readTaggedBuilds(tag, event=None, inherit=False, latest=False, package=None, ('build.id', 'build_id'), ('build.version', 'version'), ('build.release', 'release'), ('build.epoch', 'epoch'), ('build.state', 'state'), ('build.completion_time', 'completion_time'), + ('build.promotion_time', 'promotion_time'), ('build.start_time', 'start_time'), ('build.task_id', 'task_id'), + ('build.draft', 'draft'), ('users.id', 'owner_id'), ('users.name', 'owner_name'), + ('promoter.id', 'promoter_id'), ('promoter.name', 'promoter_name'), ('events.id', 'creation_event_id'), ('events.time', 'creation_time'), ('volume.id', 'volume_id'), ('volume.name', 'volume_name'), ('package.id', 'package_id'), ('package.name', 'package_name'), @@ -1446,6 +1453,7 @@ def readTaggedBuilds(tag, event=None, inherit=False, latest=False, package=None, 'package ON package.id = build.pkg_id', 'volume ON volume.id = build.volume_id', 'users ON users.id = build.owner', + 'LEFT JOIN users AS promoter ON promoter.id = build.promoter', ] if type is None: @@ -1477,6 +1485,8 @@ def readTaggedBuilds(tag, event=None, inherit=False, latest=False, package=None, clauses.append('package.name = %(package)s') if owner: clauses.append('users.name = %(owner)s') + if draft is not None: + clauses.append(draft_clause(draft)) queryOpts = {'order': '-create_event'} # latest first if extra: fields.append(('build.extra', 'extra')) @@ -1514,7 +1524,7 @@ def readTaggedBuilds(tag, event=None, inherit=False, latest=False, package=None, 
def readTaggedRPMS(tag, package=None, arch=None, event=None, inherit=False, latest=True, - rpmsigs=False, owner=None, type=None, extra=True): + rpmsigs=False, owner=None, type=None, extra=True, draft=None): """Returns a list of rpms and builds for specified tag :param int|str tag: The tag name or ID to search @@ -1534,6 +1544,10 @@ def readTaggedRPMS(tag, package=None, arch=None, event=None, inherit=False, late :param str type: Filter by build type. Supported types are 'maven', 'win', and 'image'. :param bool extra: Set to "False" to skip the rpm extra info + :param bool draft: bool or None option that indicates the filter based on draft field + - None: no filter (both draft and regular builds) + - True: draft only + - False: regular only :returns: a two-element list. The first element is the list of RPMs, and the second element is the list of builds. """ @@ -1544,7 +1558,7 @@ def readTaggedRPMS(tag, package=None, arch=None, event=None, inherit=False, late taglist += [link['parent_id'] for link in readFullInheritance(tag, event)] builds = readTaggedBuilds(tag, event=event, inherit=inherit, latest=latest, package=package, - owner=owner, type=type) + owner=owner, type=type, draft=draft) # index builds build_idx = dict([(b['build_id'], b) for b in builds]) @@ -1555,6 +1569,7 @@ def readTaggedRPMS(tag, package=None, arch=None, event=None, inherit=False, late ('rpminfo.arch', 'arch'), ('rpminfo.id', 'id'), ('rpminfo.epoch', 'epoch'), + ('rpminfo.draft', 'draft'), ('rpminfo.payloadhash', 'payloadhash'), ('rpminfo.size', 'size'), ('rpminfo.buildtime', 'buildtime'), @@ -1582,6 +1597,8 @@ def readTaggedRPMS(tag, package=None, arch=None, event=None, inherit=False, late clauses.append('rpminfo.arch IN %(arch)s') else: raise koji.GenericError('Invalid type for arch option: %s' % builtins.type(arch)) + if draft is not None: + clauses.append(draft_clause(draft, table='rpminfo')) if extra: fields.append(('rpminfo.extra', 'extra')) @@ -2702,6 +2719,7 @@ def repo_init(tag, 
task_id=None, with_src=False, with_debuginfo=False, event=Non # Note: the repo_include_all option is not recommended for common use # see https://pagure.io/koji/issue/588 for background rpms, builds = readTaggedRPMS(tag_id, event=event_id, inherit=True, latest=latest) + groups = readTagGroups(tag_id, event=event_id, inherit=True) blocks = [pkg for pkg in readPackageList(tag_id, event=event_id, inherit=True, with_owners=False).values() @@ -2736,8 +2754,9 @@ def repo_init(tag, task_id=None, with_src=False, with_debuginfo=False, event=Non for build in builds: relpath = relpathinfo.build(build) builddirs[build['id']] = relpath.lstrip('/') - # generate pkglist files + # generate pkglist and rpmlist files pkglist = {} + rpmlist = {} for repoarch in repo_arches: archdir = joinpath(repodir, repoarch) koji.ensuredir(archdir) @@ -2746,31 +2765,43 @@ def repo_init(tag, task_id=None, with_src=False, with_debuginfo=False, event=Non top_link = joinpath(archdir, 'toplink') os.symlink(top_relpath, top_link) pkglist[repoarch] = open(joinpath(archdir, 'pkglist'), 'wt', encoding='utf-8') - # NOTE - rpms is now an iterator + rpmlist[repoarch] = open(joinpath(archdir, 'rpmlist.jsonl'), 'wt', encoding='utf-8') + # NOTE - rpms is a generator for rpminfo in rpms: if not with_debuginfo and koji.is_debuginfo(rpminfo['name']): continue relpath = "%s/%s\n" % (builddirs[rpminfo['build_id']], relpathinfo.rpm(rpminfo)) + rpm_json = json.dumps(rpminfo, indent=None) + # must be one line for nl-delimited json arch = rpminfo['arch'] if arch == 'src': if with_src: for repoarch in repo_arches: pkglist[repoarch].write(relpath) + rpmlist[repoarch].write(rpm_json) + rpmlist[repoarch].write('\n') if with_separate_src: pkglist[arch].write(relpath) + rpmlist[arch].write(rpm_json) + rpmlist[arch].write('\n') elif arch == 'noarch': for repoarch in repo_arches: if repoarch == 'src': continue pkglist[repoarch].write(relpath) + rpmlist[repoarch].write(rpm_json) + rpmlist[repoarch].write('\n') else: repoarch = 
koji.canonArch(arch) if repoarch not in repo_arches: # Do not create a repo for arches not in the arch list for this tag continue pkglist[repoarch].write(relpath) + rpmlist[repoarch].write(rpm_json) + rpmlist[repoarch].write('\n') for repoarch in repo_arches: pkglist[repoarch].close() + rpmlist[repoarch].close() # write blocked package lists for repoarch in repo_arches: @@ -4357,6 +4388,7 @@ def get_build(buildInfo, strict=False): release epoch nvr + draft: Whether the build is draft or not state task_id: ID of the task that kicked off the build owner_id: ID of the user who kicked off the build @@ -4392,8 +4424,11 @@ def get_build(buildInfo, strict=False): fields = (('build.id', 'id'), ('build.version', 'version'), ('build.release', 'release'), ('build.id', 'build_id'), - ('build.epoch', 'epoch'), ('build.state', 'state'), + ('build.epoch', 'epoch'), + ('build.draft', 'draft'), + ('build.state', 'state'), ('build.completion_time', 'completion_time'), + ('build.promotion_time', 'promotion_time'), ('build.start_time', 'start_time'), ('build.task_id', 'task_id'), ('events.id', 'creation_event_id'), ('events.time', 'creation_time'), @@ -4404,7 +4439,9 @@ def get_build(buildInfo, strict=False): ("date_part('epoch', events.time)", 'creation_ts'), ("date_part('epoch', build.start_time)", 'start_ts'), ("date_part('epoch', build.completion_time)", 'completion_ts'), + ("date_part('epoch', build.promotion_time)", 'promotion_ts'), ('users.id', 'owner_id'), ('users.name', 'owner_name'), + ('promoter.id', 'promoter_id'), ('promoter.name', 'promoter_name'), ('build.cg_id', 'cg_id'), ('build.source', 'source'), ('build.extra', 'extra')) @@ -4413,6 +4450,7 @@ def get_build(buildInfo, strict=False): 'package on build.pkg_id = package.id', 'volume on build.volume_id = volume.id', 'users on build.owner = users.id', + 'LEFT JOIN users AS promoter ON build.promoter = promoter.id', ] clauses = ['build.id = %(buildID)i'] query = QueryProcessor(columns=fields, aliases=aliases, 
values=locals(), @@ -4481,6 +4519,8 @@ def get_next_release(build_info, incr=1): This method searches the latest building, successful, or deleted build and returns the "next" release value for that version. + Note that draft builds are excluded while getting that latest build. + Examples: None becomes "1" @@ -4512,7 +4552,7 @@ def get_next_release(build_info, incr=1): query = QueryProcessor(tables=['build'], joins=['package ON build.pkg_id = package.id'], columns=['build.id', 'release'], clauses=['name = %(name)s', 'version = %(version)s', - 'state in %(states)s'], + 'state in %(states)s', 'NOT draft'], values=values, opts={'order': '-build.id', 'limit': 1}) result = query.executeOne() @@ -4579,7 +4619,8 @@ def get_rpm(rpminfo, strict=False, multi=False): """Get information about the specified RPM rpminfo may be any one of the following: - - a int ID + - the rpm id as an int + - the rpm id as a string - a string N-V-R.A - a string N-V-R.A@location - a map containing 'name', 'version', 'release', and 'arch' @@ -4593,6 +4634,7 @@ def get_rpm(rpminfo, strict=False, multi=False): - version - release - arch + - draft - epoch - payloadhash - size @@ -4607,10 +4649,70 @@ def get_rpm(rpminfo, strict=False, multi=False): If there is no RPM with the given ID, None is returned, unless strict is True in which case an exception is raised - If more than one RPM matches, and multi is True, then a list of results is - returned. If multi is False, a single match is returned (an internal one if - possible). + This function is normally expected to return a single rpm. However, there + are cases where the given rpminfo could refer to multiple rpms. This is + because of nvra overlap involving: + * draft rpms + * external rpms + + If more than one RPM matches, then in the default case (multi=False), this function + will choose the best option in order of preference: + 1. internal non-draft rpms (nvras are unique within this subset) + 2. internal draft rpms (highest rpm id) + 3. 
external rpms (highest rpm id) + OTOH if multi is True, then all matching results are returned as a list """ + # we can look up by id or NVRA + data = None + if isinstance(rpminfo, int): + data = {'id': rpminfo} + elif isinstance(rpminfo, str): + # either nvra or id as a string + try: + data = {'id': int(rpminfo)} + except ValueError: + data = koji.parse_NVRA(rpminfo) + elif isinstance(rpminfo, dict): + data = rpminfo.copy() + else: + raise koji.GenericError("Invalid type for rpminfo: %r" % type(rpminfo)) + + rpms = _get_rpms(data) + if multi: + return rpms + + # otherwise make sure we have a single rpm + if not rpms: + if strict: + raise koji.GenericError("No such rpm: %r" % data) + return None + elif len(rpms) == 1: + return rpms[0] + else: + # pick our preferred, as described above + nondraft = None + draft = None + external = None + for rinfo in rpms: + if rinfo['external_repo_id']: + if external is None or rinfo['id'] > external['id']: + external = rinfo + elif rinfo['draft']: + if draft is None or rinfo['id'] > draft['id']: + draft = rinfo + else: + # rinfo is internal and nondraft + if nondraft: + # should not happen + # none of our selection options should result in more than one nondraft build + raise koji.GenericError("Multiple nondraft rpm matches for: %r" % data) + else: + nondraft = rinfo + return nondraft or draft or external + + +def _get_rpms(data): + """Helper function for get_rpm""" fields = ( ('rpminfo.id', 'id'), ('build_id', 'build_id'), @@ -4620,6 +4722,7 @@ def get_rpm(rpminfo, strict=False, multi=False): ('release', 'release'), ('epoch', 'epoch'), ('arch', 'arch'), + ('draft', 'draft'), ('external_repo_id', 'external_repo_id'), ('external_repo.name', 'external_repo_name'), ('payloadhash', 'payloadhash'), @@ -4628,55 +4731,25 @@ def get_rpm(rpminfo, strict=False, multi=False): ('metadata_only', 'metadata_only'), ('extra', 'extra'), ) - # we can look up by id or NVRA - data = None - if isinstance(rpminfo, int): - data = {'id': rpminfo} - elif 
isinstance(rpminfo, str): - data = koji.parse_NVRA(rpminfo) - elif isinstance(rpminfo, dict): - data = rpminfo.copy() - else: - raise koji.GenericError("Invalid type for rpminfo: %r" % type(rpminfo)) clauses = [] if 'id' in data: clauses.append("rpminfo.id=%(id)s") else: clauses.append("rpminfo.name=%(name)s AND version=%(version)s " "AND release=%(release)s AND arch=%(arch)s") - retry = False if 'location' in data: data['external_repo_id'] = get_external_repo_id(data['location'], strict=True) - clauses.append("""external_repo_id = %(external_repo_id)i""") - elif not multi: - # try to match internal first, otherwise first matching external - retry = True # if no internal match - orig_clauses = list(clauses) # copy - clauses.append("""external_repo_id = 0""") - + clauses.append("""external_repo_id = %(external_repo_id)s""") joins = ['external_repo ON rpminfo.external_repo_id = external_repo.id'] query = QueryProcessor(columns=[f[0] for f in fields], aliases=[f[1] for f in fields], tables=['rpminfo'], joins=joins, clauses=clauses, values=data, transform=_fix_rpm_row) - if multi: - return query.execute() - ret = query.executeOne() - if ret: - return ret - if retry: - # at this point we have just an NVRA with no internal match. Open it up to externals - query.clauses = orig_clauses - ret = query.executeOne() - if not ret: - if strict: - raise koji.GenericError("No such rpm: %r" % data) - return None - return ret + return query.execute() def list_rpms(buildID=None, buildrootID=None, imageID=None, componentBuildrootID=None, hostID=None, - arches=None, queryOpts=None): + arches=None, queryOpts=None, draft=None): """List RPMS. If buildID, imageID and/or buildrootID are specified, restrict the list of RPMs to only those RPMs that are part of that build, or were built in that buildroot. 
If componentBuildrootID is specified, @@ -4691,6 +4764,7 @@ def list_rpms(buildID=None, buildrootID=None, imageID=None, componentBuildrootID - nvr (synthesized for sorting purposes) - arch - epoch + - draft - payloadhash - size - buildtime @@ -4706,12 +4780,22 @@ def list_rpms(buildID=None, buildrootID=None, imageID=None, componentBuildrootID - is_update If no build has the given ID, or the build generated no RPMs, - an empty list is returned.""" + an empty list is returned. + + The option draft with a bool/None value is to filter rpm by that + rpm belongs to a draft build, a regular build or both (default). It stands for: + - None: no filter (both draft and regular builds) + - True: draft only + - False: regular only + """ + fields = [('rpminfo.id', 'id'), ('rpminfo.name', 'name'), ('rpminfo.version', 'version'), ('rpminfo.release', 'release'), ("rpminfo.name || '-' || rpminfo.version || '-' || rpminfo.release", 'nvr'), ('rpminfo.arch', 'arch'), - ('rpminfo.epoch', 'epoch'), ('rpminfo.payloadhash', 'payloadhash'), + ('rpminfo.epoch', 'epoch'), + ('rpminfo.draft', 'draft'), + ('rpminfo.payloadhash', 'payloadhash'), ('rpminfo.size', 'size'), ('rpminfo.buildtime', 'buildtime'), ('rpminfo.build_id', 'build_id'), ('rpminfo.buildroot_id', 'buildroot_id'), ('rpminfo.external_repo_id', 'external_repo_id'), @@ -4749,6 +4833,8 @@ def list_rpms(buildID=None, buildrootID=None, imageID=None, componentBuildrootID clauses.append('rpminfo.arch = %(arches)s') else: raise koji.GenericError('Invalid type for "arches" parameter: %s' % type(arches)) + if draft is not None: + clauses.append(draft_clause(draft)) fields, aliases = zip(*fields) query = QueryProcessor(columns=fields, aliases=aliases, @@ -5959,7 +6045,7 @@ def check_volume_policy(data, strict=False, default=None): return None -def apply_volume_policy(build, strict=False): +def apply_volume_policy(build, strict=False, dry_run=False): """Apply volume policy, moving build as needed build should be the buildinfo returned by 
get_build() @@ -5967,12 +6053,16 @@ def apply_volume_policy(build, strict=False): The strict options determines what happens in the case of a bad policy. If strict is True, an exception will be raised. Otherwise, the existing volume we be retained. + + If dry_run is True, return the volume instead of doing the actual moving. """ policy_data = {'build': build} task_id = extract_build_task(build) if task_id: policy_data.update(policy_data_from_task(task_id)) volume = check_volume_policy(policy_data, strict=strict) + if dry_run: + return volume if volume is None: # just leave the build where it is return @@ -5985,6 +6075,10 @@ def apply_volume_policy(build, strict=False): def new_build(data, strict=False): """insert a new build entry + If the build to create is a draft, the release field is the target release + rather than its actual release with draft suffix. The draft suffix will be + generated here as ",draft_". + If strict is specified, raise an exception, if build already exists. """ @@ -6007,11 +6101,14 @@ def new_build(data, strict=False): raise koji.GenericError("No %s value for build" % f) if 'extra' in data: try: + # backwards compatible for data in callback data['extra'] = json.dumps(data['extra']) except Exception: raise koji.GenericError("No such build extra data: %(extra)r" % data) else: data['extra'] = None + if 'draft' in data: + data['draft'] = bool(data['draft']) # provide a few default values data.setdefault('state', koji.BUILD_STATES['COMPLETE']) @@ -6021,12 +6118,16 @@ def new_build(data, strict=False): data.setdefault('owner', context.session.user_id) data.setdefault('task_id', None) data.setdefault('volume_id', 0) + data.setdefault('draft', False) # check for existing build old_binfo = get_build(data) if old_binfo: - if strict: - raise koji.GenericError(f'Existing build found: {old_binfo}') + old_str = '%(nvr)s (id=%(id)s)' % old_binfo + if data['draft']: + raise koji.GenericError(f'Target build already exists: {old_str}') + elif strict: + raise 
koji.GenericError(f'Existing build found: {old_str}') recycle_build(old_binfo, data) # Raises exception if there is a problem return old_binfo['id'] @@ -6034,12 +6135,23 @@ def new_build(data, strict=False): new=data['state'], info=data) # insert the new data - insert_data = dslice(data, ['pkg_id', 'version', 'release', 'epoch', 'state', 'volume_id', - 'task_id', 'owner', 'start_time', 'completion_time', 'source', - 'extra']) + insert_data = dslice(data, ['pkg_id', 'version', 'release', 'epoch', 'draft', 'state', + 'volume_id', 'task_id', 'owner', 'start_time', 'completion_time', + 'source', 'extra']) if 'cg_id' in data: insert_data['cg_id'] = data['cg_id'] data['id'] = insert_data['id'] = nextval('build_id_seq') + # handle draft suffix in release + if data.get('draft'): + data['release'] = insert_data['release'] = koji.gen_draft_release( + data['release'], data['id'] + ) + # it's still possible to already have a build with the same nvr + draft_nvr = dslice(data, ['name', 'version', 'release']) + if find_build_id(draft_nvr): + raise koji.GenericError( + f"The build already exists: {draft_nvr}" + ) insert = InsertProcessor('build', data=insert_data) insert.execute() new_binfo = get_build(data['id'], strict=True) @@ -6217,16 +6329,22 @@ def import_build(srpm, rpms, brmap=None, task_id=None, build_id=None, logs=None) build['volume_name'] = vol['name'] if build_id is None: + # This should not be possible + logger.error('Calling import_build without a build_id is deprecated') build_id = new_build(build) binfo = get_build(build_id, strict=True) new_typed_build(binfo, 'rpm') else: # build_id was passed in - sanity check + build['id'] = build_id binfo = get_build(build_id, strict=True) st_complete = koji.BUILD_STATES['COMPLETE'] st_old = binfo['state'] + koji.plugin.run_callbacks('preBuildStateChange', attribute='state', old=st_old, new=st_complete, info=binfo) + if binfo.get('draft'): + build['release'] = koji.gen_draft_release(build['release'], build['id']) for key in 
('name', 'version', 'release', 'epoch', 'task_id'): if build[key] != binfo[key]: raise koji.GenericError( @@ -6278,6 +6396,8 @@ def import_rpm(fn, buildinfo=None, brootid=None, wrapper=False, fileinfo=None): hdr = koji.get_rpm_header(fn) rpminfo = koji.get_header_fields(hdr, ['name', 'version', 'release', 'epoch', 'sourcepackage', 'arch', 'buildtime', 'sourcerpm']) + draft = True if buildinfo and buildinfo.get('draft') else False + rpminfo['draft'] = draft if rpminfo['sourcepackage'] == 1: rpminfo['arch'] = "src" @@ -6288,7 +6408,12 @@ def import_rpm(fn, buildinfo=None, brootid=None, wrapper=False, fileinfo=None): raise koji.GenericError("bad filename: %s (expected %s)" % (basename, expected)) if buildinfo is None: - # figure it out for ourselves + # This only happens when we're called from importRPM + if draft: + # shouldn't happen with current code + raise koji.GenericError('rpm import is not supported for draft builds') + # the existing build got by rpminfo below should never be draft, + # because "," is an invalid char for rpm "release" if rpminfo['sourcepackage'] == 1: buildinfo = get_build(rpminfo, strict=False) if not buildinfo: @@ -6303,13 +6428,13 @@ def import_rpm(fn, buildinfo=None, brootid=None, wrapper=False, fileinfo=None): # XXX - handle case where package is not a source rpm # and we still need to create a new build raise koji.GenericError('No such build') - state = koji.BUILD_STATES[buildinfo['state']] - if state in ('FAILED', 'CANCELED', 'DELETED'): - nvr = "%(name)s-%(version)s-%(release)s" % buildinfo - raise koji.GenericError("Build is %s: %s" % (state, nvr)) elif not wrapper: # only enforce the srpm name matching the build for non-wrapper rpms - srpmname = "%(name)s-%(version)s-%(release)s.src.rpm" % buildinfo + nvrinfo = buildinfo.copy() + if draft: + # for draft build, change release to target_release + nvrinfo['release'] = koji.parse_target_release(buildinfo['release']) + srpmname = "%(name)s-%(version)s-%(release)s.src.rpm" % nvrinfo # 
either the sourcerpm field should match the build, or the filename # itself (for the srpm) if rpminfo['sourcepackage'] != 1: @@ -6320,6 +6445,11 @@ def import_rpm(fn, buildinfo=None, brootid=None, wrapper=False, fileinfo=None): raise koji.GenericError("srpm mismatch for %s: %s (expected %s)" % (fn, basename, srpmname)) + state = koji.BUILD_STATES[buildinfo['state']] + if state in ('FAILED', 'CANCELED', 'DELETED'): + nvr = "%(name)s-%(version)s-%(release)s" % buildinfo + raise koji.GenericError("Build is %s: %s" % (state, nvr)) + # if we're adding an rpm to it, then this build is of rpm type # harmless if build already has this type new_typed_build(buildinfo, 'rpm') @@ -6391,6 +6521,7 @@ def cg_init_build(cg, data): other values will be ignored anyway (owner, state, ...) :return: dict with build_id and token """ + reject_draft(data) assert_cg(cg) cg_id = lookup_name('content_generator', cg, strict=True)['id'] data['owner'] = context.session.user_id @@ -6670,6 +6801,10 @@ class CG_Importer(object): if (koji.BUILD_STATES[buildinfo['state']] not in ('CANCELED', 'FAILED')): raise koji.GenericError("Build already exists: %r" % buildinfo) # note: the checks in recycle_build will also apply when we call new_build later + + if buildinfo: + reject_draft(buildinfo) + # gather needed data buildinfo = dslice(metadata['build'], ['name', 'version', 'release', 'extra', 'source']) if 'build_id' in metadata['build']: @@ -6896,11 +7031,14 @@ class CG_Importer(object): if 'id' in comp: # not in metadata spec, and will confuse get_rpm raise koji.GenericError("Unexpected 'id' field in component") + # rpm is no more unique with NVRA as draft build is introduced rinfo = get_rpm(comp, strict=False) if not rinfo: # XXX - this is a temporary workaround until we can better track external refs self.log_warning("IGNORING unmatched rpm component: %r" % comp) return None + # TODO: we should consider how to handle them once draft build is enabled for CG + reject_draft(rinfo, is_rpm=True) if 
rinfo['payloadhash'] != comp['sigmd5']: # XXX - this is a temporary workaround until we can better track external refs self.log_warning("IGNORING rpm component (md5 mismatch): %r" % comp) @@ -7286,6 +7424,7 @@ def merge_scratch(task_id): if not build: raise koji.ImportError('no such build: %(name)s-%(version)s-%(release)s' % build_nvr) + reject_draft(build, koji.ImportError(f"build to import is a draft build: {build['nvr']}")) if build['state'] != koji.BUILD_STATES['COMPLETE']: raise koji.ImportError('%s did not complete successfully' % build['nvr']) if not build['task_id']: @@ -7520,7 +7659,6 @@ def new_image_build(build_info): def new_typed_build(build_info, btype): """Mark build as a given btype""" - btype_id = lookup_name('btype', btype, strict=True)['id'] query = QueryProcessor(tables=['build_types'], columns=['build_id'], clauses=['build_id = %(build_id)i', 'btype_id = %(btype_id)i'], @@ -7557,6 +7695,7 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No buildroot_id: the id of the buildroot the archive was built in (may be None) fileinfo: content generator metadata for file (may be None) """ + reject_draft(buildinfo) if fileinfo is None: fileinfo = {} @@ -8370,7 +8509,7 @@ def query_history(tables=None, **kwargs): return ret -def untagged_builds(name=None, queryOpts=None): +def untagged_builds(name=None, queryOpts=None, draft=None): """Returns the list of untagged builds""" st_complete = koji.BUILD_STATES['COMPLETE'] # following can be achieved with simple query but with @@ -8387,6 +8526,9 @@ def untagged_builds(name=None, queryOpts=None): if name is not None: clauses.append('package.name = %(name)s') + if draft is not None: + clauses.append(draft_clause(draft)) + query = QueryProcessor(tables=['build', 'package'], columns=['build.id', 'package.name', 'build.version', 'build.release'], aliases=['id', 'name', 'version', 'release'], @@ -8671,6 +8813,7 @@ def _delete_build(binfo): update = UpdateProcessor('build', values=values, 
clauses=['id=%(build_id)i'], data={'state': st_deleted}) update.execute() + _delete_build_symlinks(binfo) # now clear the build dir builddir = koji.pathinfo.build(binfo) if os.path.exists(builddir): @@ -8680,15 +8823,49 @@ def _delete_build(binfo): attribute='state', old=st_old, new=st_deleted, info=binfo) +def _delete_build_symlinks(binfo): + """Remove symlinks pointing to the build + + These include the symlink created by ensure_volume_symlink and the + symlink created when draft builds are promoted. + """ + + base_vol = lookup_name('volume', 'DEFAULT', strict=True) + if binfo['volume_id'] != base_vol['id']: + # remove the link created by ensure_volume_symlinks + base_binfo = binfo.copy() + base_binfo['volume_id'] = base_vol['id'] + base_binfo['volume_name'] = base_vol['name'] + basedir = koji.pathinfo.build(base_binfo) + if os.path.islink(basedir): + os.unlink(basedir) + + if not binfo['draft']: + # if the build isn't a draft, it may once have been + draft_release = koji.gen_draft_release(binfo['release'], binfo['id']) + for check_vol in list_volumes(): + # the build could have been on any volume when promoted + check_binfo = binfo.copy() + check_binfo['volume_id'] = check_vol['id'] + check_binfo['volume_name'] = check_vol['name'] + check_binfo['release'] = draft_release + checkdir = koji.pathinfo.build(check_binfo) + if os.path.islink(checkdir): + os.unlink(checkdir) + + def reset_build(build): """Reset a build so that it can be reimported WARNING: this function is highly destructive. use with care. 
nulls task_id sets state to CANCELED + sets volume to DEFAULT clears all referenced data in other tables, including buildroot and archive component tables + draft and extra are kept + after reset, only the build table entry is left """ # Only an admin may do this @@ -8767,6 +8944,8 @@ def reset_build(build): update = UpdateProcessor('build', clauses=['id=%(id)s'], values={'id': binfo['id']}, data={'state': binfo['state'], 'task_id': None, 'volume_id': 0}) update.execute() + + _delete_build_symlinks(binfo) # now clear the build dir builddir = koji.pathinfo.build(binfo) if os.path.exists(builddir): @@ -9649,6 +9828,20 @@ class IsBuildOwnerTest(koji.policy.BaseSimpleTest): return False +class IsDraftTest(koji.policy.BaseSimpleTest): + """Check if the build is a draft build""" + name = "is_draft" + + def run(self, data): + if 'draft' in data: + return data['draft'] + if 'build' in data: + build = get_build(data['build']) + return build.get('draft', False) + # default... + return False + + class UserInGroupTest(koji.policy.BaseSimpleTest): """Check if user is in group(s) @@ -10028,6 +10221,9 @@ def importImageInternal(task_id, build_info, imgdata): data = add_external_rpm(an_rpm, location, strict=False) else: data = get_rpm(an_rpm, strict=True) + # unlike buildroot, we simply reject draft rpms as rpm components + # because we probably don't want to keep the nvra uniqueness here. + reject_draft(data, is_rpm=True) rpm_ids.append(data['id']) # we sort to try to avoid deadlock issues rpm_ids.sort() @@ -10048,6 +10244,107 @@ def importImageInternal(task_id, build_info, imgdata): build=build_info, fullpath=fullpath) +def _promote_build(build, force=False): + """Promote a draft build to a regular build. + + - The build type is limited to rpm so far. + - The promoting action cannot be revoked. 
+ - The release wil be changed to the target one, so build_id isn't changed + - buildpath will be changed as well and the old build path will symlink + to the new one, so both paths still will be existing until deleted. + + :param build: A build ID (int), a NVR (string), or a dict containing + "name", "version" and "release" of a draft build + :type build: int, str, dict + :param bool force: If False (default), Koji will check this + operation against the draft_promotion hub policy. If hub + policy does not allow the current user to promote the draft build, + then this method will raise an error. + If True, then this method will bypass hub policy settings. + Only admin users can set force to True. + :returns: latest build info + :rtype: dict + """ + context.session.assertLogin() + user = get_user(context.session.user_id) + + binfo = get_build(build, strict=True) + err_fmt = f"Cannot promote build {binfo['nvr']}. Reason: {{}}" + if not binfo.get('draft'): + raise koji.GenericError(err_fmt.format("Not a draft build")) + state = koji.BUILD_STATES[binfo['state']] + if state != 'COMPLETE': + raise koji.GenericError(err_fmt.format(f'state ({state}) is not COMPLETE.')) + + old_release = binfo['release'] + target_release = koji.parse_target_release(old_release) + + # drop id to get build by NVR + target_build = dslice(binfo, ['name', 'version']) + target_build['release'] = target_release + old_build = get_build(target_build, strict=False) + if old_build: + raise koji.GenericError(err_fmt.format( + f"Target build exists: {old_build['nvr']}(#{old_build['id']})" + )) + + # policy checks + policy_data = { + 'build': binfo['id'], + 'target_release': target_release, + 'user_id': user['id'] + } + assert_policy('draft_promotion', policy_data, force=force) + + koji.plugin.run_callbacks( + 'preBuildPromote', + draft_release=old_release, + target_release=target_release, + build=binfo, + user=user + ) + + update = UpdateProcessor('build', clauses=['id=%(id)i'], values=binfo) + 
update.set(draft=False, release=target_release, promoter=user['id']) + update.rawset(promotion_time='now()') + update.execute() + # Note: changing build.draft will implicitly update corresponding rpminfo.draft fields + # due to the ON UPDATE CASCADE foreign key constraint + + new_binfo = get_build(binfo['id'], strict=True) + oldpath = koji.pathinfo.build(binfo) + newpath = koji.pathinfo.build(new_binfo) + safer_move(oldpath, newpath) + ensure_volume_symlink(new_binfo) + + # provide a symlink at original draft location + # we point to the default volume in case the build moves in the future + base_vol = lookup_name('volume', 'DEFAULT', strict=True) + base_binfo = new_binfo.copy() + base_binfo['volume_id'] = base_vol['id'] + base_binfo['volume_name'] = base_vol['name'] + basedir = koji.pathinfo.build(base_binfo) + relpath = os.path.relpath(basedir, os.path.dirname(oldpath)) + os.symlink(relpath, oldpath) + + # apply volume policy in case it's changed by release update. + apply_volume_policy(new_binfo, strict=False) + + # adding DRAFT_PROMOTION for kojira, + # as the latest promoted build should be that latest one. 
+ for tag in list_tags(build=binfo['id']): + set_tag_update(tag['id'], 'DRAFT_PROMOTION') + + koji.plugin.run_callbacks( + 'postBuildPromote', + draft_release=old_release, + target_release=target_release, + build=new_binfo, + user=user + ) + return new_binfo + + def _delete_event_id(): """Helper function to bump event""" try: @@ -10798,23 +11095,24 @@ class RootExports(object): build = get_build(build, strict=True) return apply_volume_policy(build, strict) - def createEmptyBuild(self, name, version, release, epoch, owner=None): + def createEmptyBuild(self, name, version, release, epoch, owner=None, draft=False): """Creates empty build entry :param str name: build name :param str version: build version :param str release: release version :param str epoch: epoch version - :param userInfo: a str (Kerberos principal or name) or an int (user id) + :param owner: a str (Kerberos principal or name) or an int (user id) or a dict: - id: User's ID - name: User's name - krb_principal: Kerberos principal + :param bool draft: create a draft build or not :return: int build ID """ context.session.assertPerm('admin') data = {'name': name, 'version': version, 'release': release, - 'epoch': epoch} + 'epoch': epoch, 'draft': draft} if owner is not None: data['owner'] = owner return new_build(data) @@ -10836,7 +11134,9 @@ class RootExports(object): - artifact_id: Artifact's ID - version: version :raises: GenericError if type for build_info is not dict, when build isn`t existing. + :raises: GenericError if draft: True in buildinfo, when build isn't existing. :raises: GenericError if build info doesn't have mandatory keys. + :raises: GenericError if build is a draft, when it's existing. 
""" context.session.assertPerm('maven-import') if not context.opts.get('EnableMaven'): @@ -10845,11 +11145,13 @@ class RootExports(object): if not build: if not isinstance(build_info, dict): raise koji.GenericError('Invalid type for build_info: %s' % type(build_info)) + reject_draft(build_info) try: build_id = new_build(dslice(build_info, ('name', 'version', 'release', 'epoch'))) except KeyError as cm: raise koji.GenericError("Build info doesn't have mandatory %s key" % cm) build = get_build(build_id, strict=True) + reject_draft(build) new_maven_build(build, maven_info) def createWinBuild(self, build_info, win_info): @@ -10866,7 +11168,9 @@ class RootExports(object): :param dict win_info: - platform: build platform :raises: GenericError if type for build_info is not dict, when build isn`t existing. + :raises: GenericError if draft: True in buildinfo, when build isn't existing. :raises: GenericError if build info doesn't have mandatory keys. + :raises: GenericError if build is a draft, when it's existing. """ context.session.assertPerm('win-import') if not context.opts.get('EnableWin'): @@ -10875,11 +11179,13 @@ class RootExports(object): if not build: if not isinstance(build_info, dict): raise koji.GenericError('Invalid type for build_info: %s' % type(build_info)) + reject_draft(build_info) try: build_id = new_build(dslice(build_info, ('name', 'version', 'release', 'epoch'))) except KeyError as cm: raise koji.GenericError("Build info doesn't have mandatory %s key" % cm) build = get_build(build_id, strict=True) + reject_draft(build) new_win_build(build, win_info) def createImageBuild(self, build_info): @@ -10895,18 +11201,22 @@ class RootExports(object): - release: build release - epoch: build epoch :raises: GenericError if type for build_info is not dict, when build isn`t existing. + :raises: GenericError if draft: True in buildinfo, when build isn't existing. :raises: GenericError if build info doesn't have mandatory keys. 
+ :raises: GenericError if build is a draft, when it's existing. """ context.session.assertPerm('image-import') build = get_build(build_info) if not build: if not isinstance(build_info, dict): raise koji.GenericError('Invalid type for build_info: %s' % type(build_info)) + reject_draft(build_info) try: build_id = new_build(dslice(build_info, ('name', 'version', 'release', 'epoch'))) except KeyError as cm: raise koji.GenericError("Build info doesn't have mandatory %s key" % cm) build = get_build(build_id, strict=True) + reject_draft(build) new_image_build(build) def importRPM(self, path, basename): @@ -10924,6 +11234,7 @@ class RootExports(object): add_rpm_sig(rpminfo['id'], koji.rip_rpm_sighdr(fn)) for tag in list_tags(build=rpminfo['build_id']): set_tag_update(tag['id'], 'IMPORT') + return rpminfo def mergeScratch(self, task_id): """Import the rpms generated by a scratch build, and associate @@ -11710,29 +12021,34 @@ class RootExports(object): task.setPriority(priority, recurse=recurse) def listTagged(self, tag, event=None, inherit=False, prefix=None, latest=False, package=None, - owner=None, type=None, strict=True, extra=False): + owner=None, type=None, strict=True, extra=False, draft=None): """List builds tagged with tag. :param int|str tag: tag name or ID number :param int event: event ID :param bool inherit: If inherit is True, follow the tag hierarchy and return - a list of tagged builds for all tags in the tree + a list of tagged builds for all tags in the tree :param str prefix: only builds whose package name starts with that prefix - :param bool|int latest: True for latest build per package, - N to get N latest builds per package. + :param bool|int latest: True for latest build per package, + N to get N latest builds per package. 
:param str package: only builds of the specified package :param owner: only builds of the specified owner :param str type: only builds of the given btype (such as maven or image) :param bool strict: If tag doesn't exist, an exception is raised, - unless strict is False in which case returns an empty list. + unless strict is False in which case returns an empty list. :param bool extra: Set to "True" to get the build extra info + :param bool draft: bool or None option that indicates the filter based on draft field + - None: no filter (both draft and regular builds) + - True: draft only + - False: regular only """ # lookup tag id tag = get_tag(tag, strict=strict, event=event) if not tag: return [] results = readTaggedBuilds(tag['id'], event, inherit=inherit, latest=latest, - package=package, owner=owner, type=type, extra=extra) + package=package, owner=owner, type=type, extra=extra, + draft=draft) if prefix: prefix = prefix.lower() results = [build for build in results @@ -11740,7 +12056,8 @@ class RootExports(object): return results def listTaggedRPMS(self, tag, event=None, inherit=False, latest=False, package=None, arch=None, - rpmsigs=False, owner=None, type=None, strict=True, extra=True): + rpmsigs=False, owner=None, type=None, strict=True, extra=True, + draft=None): """List rpms and builds within tag. :param int|str tag: tag name or ID number @@ -11759,6 +12076,10 @@ class RootExports(object): :param bool strict: If tag doesn't exist, an exception is raised, unless strict is False in which case returns an empty list. 
:param bool extra: Set to "False" to skip the rpms extra info + :param bool draft: bool or None option that indicates the filter based on draft field + - None: no filter (both draft and regular builds) + - True: draft only + - False: regular only """ # lookup tag id tag = get_tag(tag, strict=strict, event=event) @@ -11766,7 +12087,7 @@ class RootExports(object): return [] return readTaggedRPMS(tag['id'], event=event, inherit=inherit, latest=latest, package=package, arch=arch, rpmsigs=rpmsigs, owner=owner, - type=type, extra=extra) + type=type, extra=extra, draft=draft) def listTaggedArchives(self, tag, event=None, inherit=False, latest=False, package=None, type=None, strict=True, extra=True): @@ -11794,7 +12115,7 @@ class RootExports(object): def listBuilds(self, packageID=None, userID=None, taskID=None, prefix=None, state=None, volumeID=None, source=None, createdBefore=None, createdAfter=None, completeBefore=None, completeAfter=None, type=None, typeInfo=None, - queryOpts=None, pattern=None, cgID=None): + queryOpts=None, pattern=None, cgID=None, draft=None): """ Return a list of builds that match the given parameters @@ -11830,6 +12151,10 @@ class RootExports(object): fields are matched For type=win, the provided platform fields are matched + :param bool draft: bool or None option that indicates the filter based on draft field + - None: no filter (both draft and regular builds) + - True: draft only + - False: regular only :returns: Returns a list of maps. 
Each map contains the following keys: @@ -11837,6 +12162,7 @@ class RootExports(object): - version - release - epoch + - draft - state - package_id - package_name @@ -11844,6 +12170,8 @@ class RootExports(object): - nvr (synthesized for sorting purposes) - owner_id - owner_name + - promoter_id + - promoter_name - volume_id - volume_name - source @@ -11854,6 +12182,8 @@ class RootExports(object): - start_ts - completion_time - completion_ts + - promotion_time + - promotion_ts - task_id - extra @@ -11871,8 +12201,11 @@ class RootExports(object): """ fields = [('build.id', 'build_id'), ('build.version', 'version'), ('build.release', 'release'), - ('build.epoch', 'epoch'), ('build.state', 'state'), + ('build.epoch', 'epoch'), + ('build.draft', 'draft'), + ('build.state', 'state'), ('build.completion_time', 'completion_time'), + ('build.promotion_time', 'promotion_time'), ('build.start_time', 'start_time'), ('build.source', 'source'), ('build.extra', 'extra'), @@ -11881,17 +12214,22 @@ class RootExports(object): ("date_part('epoch', events.time)", 'creation_ts'), ("date_part('epoch', build.start_time)", 'start_ts'), ("date_part('epoch', build.completion_time)", 'completion_ts'), + ("date_part('epoch', build.promotion_time)", 'promotion_ts'), ('package.id', 'package_id'), ('package.name', 'package_name'), ('package.name', 'name'), ('volume.id', 'volume_id'), ('volume.name', 'volume_name'), ("package.name || '-' || build.version || '-' || build.release", 'nvr'), - ('users.id', 'owner_id'), ('users.name', 'owner_name')] + ('users.id', 'owner_id'), ('users.name', 'owner_name'), + ('promoter.id', 'promoter_id'), ('promoter.name', 'promoter_name'), + ] tables = ['build'] joins = ['LEFT JOIN events ON build.create_event = events.id', 'LEFT JOIN package ON build.pkg_id = package.id', 'LEFT JOIN volume ON build.volume_id = volume.id', - 'LEFT JOIN users ON build.owner = users.id'] + 'LEFT JOIN users ON build.owner = users.id', + 'LEFT JOIN users AS promoter ON build.promoter = 
promoter.id', + ] clauses = [] if packageID is not None: packageID = get_package_id(packageID) @@ -11977,6 +12315,8 @@ class RootExports(object): btype_id = btype['id'] joins.append('build_types ON build.id = build_types.build_id ' 'AND btype_id = %(btype_id)s') + if draft is not None: + clauses.append(draft_clause(draft)) query = QueryProcessor(columns=[pair[0] for pair in fields], aliases=[pair[1] for pair in fields], @@ -11986,7 +12326,7 @@ class RootExports(object): return query.iterate() - def getLatestBuilds(self, tag, event=None, package=None, type=None): + def getLatestBuilds(self, tag, event=None, package=None, type=None, draft=None): """List latest builds for tag (inheritance enabled, wrapper of readTaggedBuilds) :param int tag: tag ID @@ -11994,15 +12334,21 @@ class RootExports(object): :param int package: filter on package name :param str type: restrict the list to builds of the given type. Currently the supported types are 'maven', 'win', 'image', or any custom content generator btypes. 
+ :param bool draft: bool or None option that indicates the filter based on draft field + - None: no filter (both draft and regular builds) + - True: draft only + - False: regular only :returns [dict]: list of buildinfo dicts """ if not isinstance(tag, int): # lookup tag id tag = get_tag_id(tag, strict=True) - return readTaggedBuilds(tag, event, inherit=True, latest=True, package=package, type=type) + return readTaggedBuilds(tag, event, inherit=True, latest=True, package=package, type=type, + draft=draft) - def getLatestRPMS(self, tag, package=None, arch=None, event=None, rpmsigs=False, type=None): + def getLatestRPMS(self, tag, package=None, arch=None, event=None, rpmsigs=False, type=None, + draft=None): """List latest RPMS for tag (inheritance enabled, wrapper of readTaggedBuilds) :param int|str tag: The tag name or ID to search @@ -12015,6 +12361,10 @@ class RootExports(object): :param bool rpmsigs: query will return one record per rpm/signature combination :param str type: Filter by build type. Supported types are 'maven', 'win', and 'image'. + :param bool draft: bool or None option that indicates the filter based on draft field + - None: no filter (both draft and regular builds) + - True: draft only + - False: regular only :returns: a two-element list. The first element is the list of RPMs, and the second element is the list of builds. 
""" @@ -12023,7 +12373,7 @@ class RootExports(object): # lookup tag id tag = get_tag_id(tag, strict=True) return readTaggedRPMS(tag, package=package, arch=arch, event=event, inherit=True, - latest=True, rpmsigs=rpmsigs, type=type) + latest=True, rpmsigs=rpmsigs, type=type, draft=draft) def getLatestMavenArchives(self, tag, event=None, inherit=True): """Return a list of the latest Maven archives in the tag, as of the given event @@ -12149,6 +12499,7 @@ class RootExports(object): - release - arch - epoch + - draft - payloadhash - size - buildtime @@ -13455,6 +13806,8 @@ class RootExports(object): koji.plugin.run_callbacks('postBuildStateChange', attribute='completion_ts', old=ts_old, new=ts, info=buildinfo) + promoteBuild = staticmethod(_promote_build) + def count(self, methodName, *args, **kw): """Execute the XML-RPC method with the given name and count the results. A method return value of None will return O, a return value of type "list", "tuple", or @@ -14014,6 +14367,7 @@ class BuildRoot(object): ('epoch', 'epoch'), ('arch', 'arch'), ('build_id', 'build_id'), + ('draft', 'draft'), ('external_repo_id', 'external_repo_id'), ('external_repo.name', 'external_repo_name'), ) @@ -15037,6 +15391,7 @@ class HostExports(object): importImageInternal(task_id, build_info, sub_results) if 'rpmresults' in sub_results: rpm_results = sub_results['rpmresults'] + # draft will be rejected _import_wrapper(rpm_results['task_id'], get_build(build_info['id'], strict=True), rpm_results) @@ -15684,3 +16039,46 @@ def create_rpm_checksum(rpm_id, sigkey, chsum_dict): insert.add_record(rpm_id=rpm_id, sigkey=sigkey, checksum=chsum, checksum_type=koji.CHECKSUM_TYPES[func]) insert.execute() + + +def reject_draft(data, is_rpm=False, error=None): + """block draft build/rpm + + TODO: remove this once draft build is open for all build types + + :param dict data: buildinfo dict or rpminfo dict if is_rpm is true + :param bool is_rpm: indicates data is rpm or build (true/false) in default error msg + 
:param koji.GenericError error: the error raised if it is a draft build, + defaults to None to raise the default "unsupported" error + :raises error: default or specified by input error when draft==True in data + """ + if data.get('draft'): + if error is None: + entry_type = 'rpm' if is_rpm else 'build' + error = koji.GenericError(f"Draft {entry_type} not supported") + raise error + + +def draft_clause(draft, table=None): + """get proper clause in build/rpm query for draft option + + :param bool draft: draft option: + True: "draft IS True" + False: "draft IS NOT True" + + :param str table: the table(alias) the draft belongs to + :return: the generated clause. + :rtype: str + :raises: GenericError if draft is None + """ + if draft is None: + raise koji.GenericError('draft cannot be None') + if not table: + table = '' + else: + table += '.' + if draft: + return f'{table}draft IS TRUE' + else: + # null is included + return f'{table}draft IS NOT TRUE' diff --git a/kojihub/kojixmlrpc.py b/kojihub/kojixmlrpc.py index 64ff8bc..36ec94b 100644 --- a/kojihub/kojixmlrpc.py +++ b/kojihub/kojixmlrpc.py @@ -610,6 +610,11 @@ _default_policies = { 'priority': ''' all :: stay ''', + 'draft_promotion': ''' + has_perm draft-promoter :: allow + is_build_owner :: allow + all :: deny Only draft-promoter and build owner can do this via default policy + ''' } diff --git a/plugins/hub/protonmsg.py b/plugins/hub/protonmsg.py index 0489ff4..c28aa4d 100644 --- a/plugins/hub/protonmsg.py +++ b/plugins/hub/protonmsg.py @@ -320,6 +320,22 @@ def prep_repo_done(cbtype, *args, **kws): queue_msg(address, props, kws) + +@convert_datetime +@callback('postBuildPromote') +def prep_build_promote(cbtype, *args, **kws): + kws['build'] = _strip_extra(kws['build']) + address = 'build.promote' + props = {'type': cbtype[4:], + 'build_id': kws['build']['id'], + 'name': kws['build']['name'], + 'version': kws['build']['version'], + 'release': kws['build']['release'], + 'draft_release': kws['draft_release'], + 
'target_release': kws['target_release'], + 'user': kws['user']['name']} + queue_msg(address, props, kws) + + def _send_msgs(urls, msgs, CONFIG): random.shuffle(urls) for url in urls: diff --git a/schemas/schema-upgrade-1.33-1.34.sql b/schemas/schema-upgrade-1.33-1.34.sql index ac02893..a4315ae 100644 --- a/schemas/schema-upgrade-1.33-1.34.sql +++ b/schemas/schema-upgrade-1.33-1.34.sql @@ -2,6 +2,7 @@ -- from version 1.33 to 1.34 BEGIN; + -- scheduler tables CREATE TABLE scheduler_task_runs ( id SERIAL NOT NULL PRIMARY KEY, @@ -48,4 +49,32 @@ BEGIN; ) WITHOUT OIDS; INSERT INTO locks(name) VALUES('scheduler'); + + -- draft builds + INSERT INTO permissions (name, description) VALUES ('draft-promoter', 'The permission required in the default "draft_promotion" hub policy rule to promote draft build.'); + + ALTER TABLE build ADD COLUMN draft BOOLEAN NOT NULL DEFAULT 'false'; + ALTER TABLE build ADD COLUMN promotion_time TIMESTAMPTZ; + ALTER TABLE build ADD COLUMN promoter INTEGER; + ALTER TABLE build ADD CONSTRAINT build_promoter_fkey FOREIGN KEY (promoter) REFERENCES users(id); + -- required by constraint rpminfo_build_id_draft_fkey on table rpminfo + ALTER TABLE build ADD CONSTRAINT draft_for_rpminfo UNIQUE (id, draft); + ALTER TABLE build ADD CONSTRAINT promotion_sane CHECK (NOT draft OR (promotion_time IS NULL AND promoter IS NULL)); + ALTER TABLE build ADD CONSTRAINT draft_release_sane CHECK + (NOT draft OR release ~ ('^.*,draft_' || id::TEXT || '$')); + + ALTER TABLE rpminfo ADD COLUMN draft BOOLEAN; + UPDATE rpminfo SET draft=FALSE WHERE build_id IS NOT NULL; + ALTER TABLE rpminfo DROP CONSTRAINT rpminfo_build_id_fkey; + ALTER TABLE rpminfo ADD CONSTRAINT rpminfo_build_id_draft_fkey + FOREIGN KEY (build_id, draft) REFERENCES build(id, draft) + ON UPDATE CASCADE; + ALTER TABLE rpminfo DROP CONSTRAINT rpminfo_unique_nvra; + ALTER TABLE rpminfo ADD CONSTRAINT build_id_draft_external_repo_id_sane + CHECK ((draft IS NULL AND build_id IS NULL AND external_repo_id <> 0) 
+ OR (draft IS NOT NULL AND build_id IS NOT NULL AND external_repo_id = 0)); + CREATE UNIQUE INDEX rpminfo_unique_nvra_not_draft + ON rpminfo(name,version,release,arch,external_repo_id) + WHERE draft IS NOT TRUE; + COMMIT; diff --git a/schemas/schema.sql b/schemas/schema.sql index a007b2c..e5f3462 100644 --- a/schemas/schema.sql +++ b/schemas/schema.sql @@ -66,6 +66,7 @@ INSERT INTO permissions (name, description) VALUES ('tag', 'Manage packages in t INSERT INTO permissions (name, description) VALUES ('target', 'Add, edit, and remove targets.'); INSERT INTO permissions (name, description) VALUES ('win-admin', 'The default hub policy rule for "vm" requires this permission to trigger Windows builds.'); INSERT INTO permissions (name, description) VALUES ('win-import', 'Import win archives.'); +INSERT INTO permissions (name, description) VALUES ('draft-promoter', 'The permission required in the default "draft_promotion" hub policy rule to promote draft build.'); CREATE TABLE user_perms ( user_id INTEGER NOT NULL REFERENCES users(id), @@ -279,23 +280,30 @@ CREATE TABLE content_generator ( -- null, or may point to a deleted task. 
CREATE TABLE build ( id SERIAL NOT NULL PRIMARY KEY, - volume_id INTEGER NOT NULL REFERENCES volume (id), + volume_id INTEGER NOT NULL REFERENCES volume (id), pkg_id INTEGER NOT NULL REFERENCES package (id) DEFERRABLE, version TEXT NOT NULL, release TEXT NOT NULL, epoch INTEGER, + draft BOOLEAN NOT NULL DEFAULT 'false', source TEXT, create_event INTEGER NOT NULL REFERENCES events(id) DEFAULT get_event(), start_time TIMESTAMPTZ, completion_time TIMESTAMPTZ, + promotion_time TIMESTAMPTZ, state INTEGER NOT NULL, task_id INTEGER REFERENCES task (id), owner INTEGER NOT NULL REFERENCES users (id), + promoter INTEGER REFERENCES users (id), cg_id INTEGER REFERENCES content_generator(id), extra TEXT, CONSTRAINT build_pkg_ver_rel UNIQUE (pkg_id, version, release), + CONSTRAINT draft_for_rpminfo UNIQUE (id, draft), +-- ^ required by constraint rpminfo_build_id_draft_fkey on table rpminfo CONSTRAINT completion_sane CHECK ((state = 0 AND completion_time IS NULL) OR - (state != 0 AND completion_time IS NOT NULL)) + (state <> 0 AND completion_time IS NOT NULL)), + CONSTRAINT promotion_sane CHECK (NOT draft OR (promotion_time IS NULL AND promoter IS NULL)), + CONSTRAINT draft_release_sane CHECK (NOT draft OR release ~ ('^.*,draft_' || id::TEXT || '$')) ) WITHOUT OIDS; CREATE INDEX build_by_pkg_id ON build (pkg_id); @@ -717,26 +725,33 @@ CREATE TABLE group_package_listing ( -- rpminfo tracks individual rpms (incl srpms) -- buildroot_id can be NULL (for externally built packages) --- even though we track epoch, we demand that N-V-R.A be unique +-- even though we track epoch, we demand that N-V-R.A be unique (for non-draft builds) -- we don't store filename b/c filename should be N-V-R.A.rpm CREATE TABLE rpminfo ( id SERIAL NOT NULL PRIMARY KEY, - build_id INTEGER REFERENCES build (id), + build_id INTEGER, buildroot_id INTEGER REFERENCES buildroot (id), name TEXT NOT NULL, version TEXT NOT NULL, release TEXT NOT NULL, epoch INTEGER, arch VARCHAR(16) NOT NULL, + draft BOOLEAN, 
external_repo_id INTEGER NOT NULL REFERENCES external_repo(id), payloadhash TEXT NOT NULL, size BIGINT NOT NULL, buildtime BIGINT NOT NULL, metadata_only BOOLEAN NOT NULL DEFAULT FALSE, extra TEXT, - CONSTRAINT rpminfo_unique_nvra UNIQUE (name,version,release,arch,external_repo_id) + FOREIGN KEY (build_id, draft) REFERENCES build (id, draft) ON UPDATE CASCADE, +-- ^ ensures the draft field is consistent with the build entry + CONSTRAINT build_id_draft_external_repo_id_sane CHECK ( + (draft IS NULL AND build_id IS NULL AND external_repo_id <> 0) + OR (draft IS NOT NULL AND build_id IS NOT NULL AND external_repo_id = 0)) ) WITHOUT OIDS; CREATE INDEX rpminfo_build ON rpminfo(build_id); +CREATE UNIQUE INDEX rpminfo_unique_nvra_not_draft ON rpminfo(name,version,release,arch,external_repo_id) + WHERE draft IS NOT TRUE; -- index for default search method for rpms, PG11+ can benefit from new include method DO $$ DECLARE version integer; diff --git a/tests/test_builder/data/calls/build_notif_1/message.txt b/tests/test_builder/data/calls/build_notif_1/message.txt index 1693f6a..e46f489 100644 --- a/tests/test_builder/data/calls/build_notif_1/message.txt +++ b/tests/test_builder/data/calls/build_notif_1/message.txt @@ -5,6 +5,7 @@ X-Koji-Tag: f23 X-Koji-Package: sisu X-Koji-Builder: user X-Koji-Status: complete +X-Koji-Draft: False Package: sisu-0.3.0-0.2.M1.fc23 Tag: f23 diff --git a/tests/test_builder/data/calls/build_notif_1/params.json b/tests/test_builder/data/calls/build_notif_1/params.json index 64281ee..12b2ebe 100644 --- a/tests/test_builder/data/calls/build_notif_1/params.json +++ b/tests/test_builder/data/calls/build_notif_1/params.json @@ -22,7 +22,8 @@ "completion_ts": 1424271457.10787, "id": 612609, "volume_name": "DEFAULT", - "nvr": "sisu-0.3.0-0.2.M1.fc23" + "nvr": "sisu-0.3.0-0.2.M1.fc23", + "draft": false }, "target": { "dest_tag": 292, diff --git a/tests/test_cli/data/list-commands.txt b/tests/test_cli/data/list-commands.txt index ed8a656..e0799eb 100644 
--- a/tests/test_cli/data/list-commands.txt +++ b/tests/test_cli/data/list-commands.txt @@ -128,6 +128,7 @@ miscellaneous commands: dist-repo Create a yum repo with distribution options import-comps Import group/package information from a comps file moshimoshi Introduce yourself + promote-build Promote a draft build version Report client and hub versions monitor commands: diff --git a/tests/test_cli/test_build.py b/tests/test_cli/test_build.py index fe53995..b66d391 100644 --- a/tests/test_cli/test_build.py +++ b/tests/test_cli/test_build.py @@ -176,6 +176,7 @@ Options: Provide a JSON string of custom metadata to be deserialized and stored under the build's extra.custom_user_metadata field + --draft Build draft build instead """ % (self.progname, self.progname)) # Finally, assert that things were called as we expected. diff --git a/tests/test_cli/test_buildinfo.py b/tests/test_cli/test_buildinfo.py index e33b7c2..c0180f6 100644 --- a/tests/test_cli/test_buildinfo.py +++ b/tests/test_cli/test_buildinfo.py @@ -80,6 +80,37 @@ Tags: self.session.listRPMs.assert_called_once_with(buildID=self.buildinfo['id']) self.assertEqual(self.session.listArchives.call_count, 4) + @mock.patch('sys.stdout', new_callable=StringIO) + def test_buildinfo_draft(self, stdout): + build = 'test-build-1-1' + binfo = copy.deepcopy(self.buildinfo) + binfo['draft'] = True + self.session.getBuild.return_value = binfo + self.session.getTaskInfo.return_value = self.taskinfo + self.session.listTags.return_value = [] + self.session.getMavenBuild.return_value = None + self.session.getWinBuild.return_value = None + self.session.listArchives.return_value = [] + self.session.listRPMs.return_value = [] + expected_stdout = """BUILD: test-build-1-1 [1] +Draft: YES +State: COMPLETE +Built by: kojiadmin +Volume: DEFAULT +Task: 8 build (target, src) +Finished: Thu, 04 Mar 2021 14:45:40 UTC +Tags: +""" + anon_handle_buildinfo(self.options, self.session, [build]) + self.assert_console_message(stdout, 
expected_stdout) + self.session.listTags.assert_called_once_with(build) + self.session.getBuild.assert_called_once_with(build) + self.session.getTaskInfo.assert_called_once_with(self.buildinfo['task_id'], request=True) + self.session.getMavenBuild.assert_called_once_with(self.buildinfo['id']) + self.session.getWinBuild.assert_called_once_with(self.buildinfo['id']) + self.session.listRPMs.assert_called_once_with(buildID=self.buildinfo['id']) + self.assertEqual(self.session.listArchives.call_count, 4) + def test_buildinfo_more_build_with_non_exist_build(self): build = 'test-build-1-1' non_exist_build = 'test-build-11-12' diff --git a/tests/test_cli/test_list_builds.py b/tests/test_cli/test_list_builds.py index 0bc8035..4f0875a 100644 --- a/tests/test_cli/test_list_builds.py +++ b/tests/test_cli/test_list_builds.py @@ -609,6 +609,8 @@ Options: pattern) --owner=OWNER List builds built by this owner --volume=VOLUME List builds by volume ID + --draft-only Only list draft builds + --no-draft Only list regular builds -k FIELD, --sort-key=FIELD Sort the list by the named field. 
Allowed sort keys: build_id, owner_name, state diff --git a/tests/test_cli/test_list_tagged.py b/tests/test_cli/test_list_tagged.py index b30dd5c..7eea290 100644 --- a/tests/test_cli/test_list_tagged.py +++ b/tests/test_cli/test_list_tagged.py @@ -42,6 +42,15 @@ class TestCliListTagged(utils.CliTestCase): 'release': '1.el6', 'arch': 'x86_64', 'sigkey': 'sigkey', + 'extra': None}, + {'id': 102, + 'build_id': 2, + 'name': 'rpmA', + 'version': '0.0.1', + 'release': '2.el6', + 'arch': 'x86_64', + 'sigkey': 'sigkey', + 'draft': True, 'extra': None} ], [{'id': 1, 'name': 'packagename', @@ -50,6 +59,15 @@ class TestCliListTagged(utils.CliTestCase): 'nvr': 'n-v-r', 'tag_name': 'tag', 'owner_name': 'owner', + 'extra': 'extra-value-2'}, + {'id': 2, + 'name': 'packagename', + 'version': 'version', + 'release': '2.el6,draft_2', + 'nvr': 'n-v-r', + 'draft': True, + 'tag_name': 'tag', + 'owner_name': 'owner', 'extra': 'extra-value-2'}]] self.session.listTagged.return_value = [{'id': 1, 'name': 'packagename', @@ -77,13 +95,15 @@ Build Tag Built by ---------------------------------------- -------------------- ---------------- n-v-r tag owner """ - args = [self.tag, self.pkg, '--latest', '--inherit', '--event', str(self.event_id)] + args = [self.tag, self.pkg, '--no-draft', '--latest', '--inherit', + '--event', str(self.event_id)] anon_handle_list_tagged(self.options, self.session, args) self.ensure_connection_mock.assert_called_once_with(self.session, self.options) self.session.getTag.assert_called_once_with(self.tag, event=self.event_id) self.session.listTagged.assert_called_once_with( - self.tag, event=self.event_id, inherit=True, latest=True, package=self.pkg) + self.tag, event=self.event_id, inherit=True, latest=True, package=self.pkg, + draft=False) self.session.listTaggedRPMS.assert_not_called() self.assert_console_message(stdout, expected) @@ -94,14 +114,14 @@ n-v-r tag owner ---------------------------------------- -------------------- ---------------- 
/mnt/koji/packages/packagename/version/1.el6 tag owner """ - args = [self.tag, self.pkg, '--latest', '--inherit', '--paths'] + args = [self.tag, self.pkg, '--latest', '--inherit', '--paths', '--draft-only'] anon_handle_list_tagged(self.options, self.session, args) self.assert_console_message(stdout, expected) self.ensure_connection_mock.assert_called_once_with(self.session, self.options) self.session.getTag.assert_called_once_with(self.tag, event=None) self.session.listTagged.assert_called_once_with( - self.tag, inherit=True, latest=True, package=self.pkg) + self.tag, inherit=True, latest=True, package=self.pkg, draft=True) self.session.listTaggedRPMS.assert_not_called() @mock.patch('sys.stdout', new_callable=six.StringIO) @@ -109,6 +129,7 @@ n-v-r tag owner def test_list_tagged_rpms(self, event_from_opts_mock, stdout): expected = """sigkey rpmA-0.0.1-1.el6.noarch sigkey rpmA-0.0.1-1.el6.x86_64 +sigkey rpmA-0.0.1-2.el6.x86_64 (,draft_2) """ args = [self.tag, self.pkg, '--latest-n=3', '--rpms', '--sigs', '--arch=x86_64', '--arch=noarch'] @@ -129,6 +150,7 @@ sigkey rpmA-0.0.1-1.el6.x86_64 def test_list_tagged_rpms_paths(self, event_from_opts_mock, stdout, os_path_exists, isdir): expected = """/mnt/koji/packages/packagename/version/1.el6/noarch/rpmA-0.0.1-1.el6.noarch.rpm /mnt/koji/packages/packagename/version/1.el6/x86_64/rpmA-0.0.1-1.el6.x86_64.rpm +/mnt/koji/packages/packagename/version/2.el6,draft_2/x86_64/rpmA-0.0.1-2.el6.x86_64.rpm """ args = [self.tag, self.pkg, '--latest-n=3', '--rpms', '--arch=x86_64', '--paths'] @@ -233,6 +255,15 @@ n-v-r tag group self.session.listTaggedRPMS.assert_not_called() self.session.listTagged.assert_not_called() + def test_list_tagged_draft_opts_conflict(self): + self.assert_system_exit( + anon_handle_list_tagged, + self.options, self.session, ['--draft-only', '--no-draft', 'tag', 'pkg1'], + stderr=self.format_error_message("--draft-only conflicts with --no-draft"), + activate_session=None, + exit_code=2) + 
self.ensure_connection_mock.assert_not_called() + def test_list_tagged_tag_not_found(self): self.session.getTag.return_value = None self.assert_system_exit( @@ -267,4 +298,6 @@ Options: --event=EVENT# query at event --ts=TIMESTAMP query at last event before timestamp --repo=REPO# query at event for a repo + --draft-only Only list draft builds/rpms + --no-draft Only list regular builds/rpms """ % self.progname) diff --git a/tests/test_cli/test_promote_build.py b/tests/test_cli/test_promote_build.py new file mode 100644 index 0000000..49530aa --- /dev/null +++ b/tests/test_cli/test_promote_build.py @@ -0,0 +1,103 @@ +from __future__ import absolute_import +import koji +import locale +import mock +import os +import time +from six.moves import StringIO + +from koji_cli.commands import handle_promote_build +from . import utils + + +class TestPromoteBuild(utils.CliTestCase): + def setUp(self): + self.maxDiff = None + self.options = mock.MagicMock() + self.session = mock.MagicMock() + self.buildinfo = {'id': 1, + 'name': 'foo-bar', + 'nvr': 'foo-bar-1.1-11', + 'package_id': 2, + 'package_name': 'test-rpm', + 'release': '11#draft_1', + 'version': '1.1', + 'draft': True} + self.target_binfo = self.buildinfo.copy() + self.target_binfo['release'] = '11' + self.target_binfo['draft'] = False + self.error_format = """Usage: %s promote-build [options] +(Specify the --help global option for a list of other help options) + +%s: error: {message} +""" % (self.progname, self.progname) + + @mock.patch('sys.stdout', new_callable=StringIO) + @mock.patch('koji_cli.commands.activate_session') + def test_handle_promote_build_valid(self, activate_session, stdout): + build_nvr = 'foo-bar-1.1-11' + self.session.getBuild.return_value = self.buildinfo + self.session.promoteBuild.return_value = self.target_binfo + expected_output = "foo-bar-1.1-11 has been promoted to foo-bar-1.1-11\n" + handle_promote_build(self.options, self.session, [build_nvr]) + self.assert_console_message(stdout, 
expected_output) + activate_session.assert_called_once_with(self.session, self.options) + self.session.getBuild.assert_called_once_with(build_nvr) + self.session.promoteBuild.assert_called_once_with(self.buildinfo['id'], force=False) + + def test_handle_promote_build_non_exist_build(self): + build_nvr = 'foo-bar-1.1-11' + self.session.getBuild.return_value = None + expected = "No such build: %s\n" % build_nvr + self.assert_system_exit( + handle_promote_build, + self.options, self.session, [build_nvr], + stdout='', + stderr=expected, + exit_code=1) + + def test_handle_promote_build_not_draft(self): + build_nvr = 'foo-bar-1.1-11' + self.session.getBuild.return_value = self.target_binfo + expected = "Not a draft build: %s\n" % build_nvr + self.assert_system_exit( + handle_promote_build, + self.options, self.session, [build_nvr], + stdout='', + stderr=expected, + exit_code=1) + + def test_promote_build_force_not_admin(self): + arguments = ['--force', 'build'] + self.session.hasPerm.return_value = False + self.assert_system_exit( + handle_promote_build, + self.options, self.session, arguments, + stdout='', + stderr=self.format_error_message("--force requires admin privilege"), + exit_code=2) + self.session.getBuild.assert_not_called() + self.session.promoteBuild.assert_not_called() + + def test_promote_build_without_option(self): + arguments = [] + self.assert_system_exit( + handle_promote_build, + self.options, self.session, arguments, + stdout='', + stderr=self.format_error_message("Please specify a draft build"), + exit_code=2, + activate_session=None) + self.session.getBuild.assert_not_called() + self.session.promoteBuild.assert_not_called() + + def test_promote_build_help(self): + self.assert_help( + handle_promote_build, + """Usage: %s promote-build [options] +(Specify the --help global option for a list of other help options) + +Options: + -h, --help show this help message and exit + -f, --force force operation +""" % self.progname) diff --git 
a/tests/test_cli/test_rpminfo.py b/tests/test_cli/test_rpminfo.py index 3e2beb3..e4784f4 100644 --- a/tests/test_cli/test_rpminfo.py +++ b/tests/test_cli/test_rpminfo.py @@ -53,7 +53,18 @@ class TestRpminfo(utils.CliTestCase): 'version': '1.1', 'payloadhash': 'b2b95550390e5f213fc25f33822425f7', 'size': 7030} - self.error_format = """Usage: %s rpminfo [options] [ ...] + self.listrpminfos = [{'arch': 'src', + 'build_id': 1, + 'buildroot_id': 3, + 'buildtime': 1615877809, + 'epoch': 7, + 'id': 290, + 'name': 'test-rpm', + 'release': '11', + 'version': '1.1', + 'payloadhash': 'b2b95550390e5f213fc25f33822425f7', + 'size': 7030}] + self.error_format = """Usage: %s rpminfo [options] [ ...] (Specify the --help global option for a list of other help options) %s: error: {message} @@ -74,9 +85,11 @@ class TestRpminfo(utils.CliTestCase): self.session.listBuildroots.return_value = [self.buildroot_info] self.session.getBuild.return_value = self.buildinfo self.session.getRPM.return_value = self.getrpminfo + self.session.listRPMs.return_value = self.listrpminfos expected_output = """RPM: 7:test-rpm-1.1-11.noarch [294] +Build: test-rpm-1.1-11 [1] RPM Path: /mnt/koji/packages/test-rpm/1.1/11/noarch/test-rpm-1.1-11.noarch.rpm -SRPM: 7:test-rpm-1.1-11 [1] +SRPM: 7:test-rpm-1.1-11 [290] SRPM Path: /mnt/koji/packages/test-rpm/1.1/11/src/test-rpm-1.1-11.src.rpm Built: Tue, 16 Mar 2021 06:56:49 UTC SIGMD5: b2b95550390e5f213fc25f33822425f7 @@ -98,6 +111,8 @@ Used in 1 buildroots: rpmID=self.getrpminfo['id']) self.session.getBuild.assert_called_once_with(self.getrpminfo['build_id']) self.session.getRPM.assert_called_once_with(rpm_nvra) + self.session.listRPMs.assert_called_once_with(buildID=self.getrpminfo['build_id'], + arches='src') def test_handle_rpminfo_non_exist_nvra(self): rpm_nvra = 'test-rpm-nvra.arch' @@ -119,9 +134,11 @@ Used in 1 buildroots: self.session.listBuildroots.return_value = [self.buildroot_info] self.session.getBuild.return_value = self.buildinfo 
self.session.getRPM.side_effect = [None, self.getrpminfo] + self.session.listRPMs.return_value = self.listrpminfos expected_output = """RPM: 7:test-rpm-1.1-11.noarch [294] +Build: test-rpm-1.1-11 [1] RPM Path: /mnt/koji/packages/test-rpm/1.1/11/noarch/test-rpm-1.1-11.noarch.rpm -SRPM: 7:test-rpm-1.1-11 [1] +SRPM: 7:test-rpm-1.1-11 [290] SRPM Path: /mnt/koji/packages/test-rpm/1.1/11/src/test-rpm-1.1-11.src.rpm Built: Tue, 16 Mar 2021 06:56:49 UTC SIGMD5: b2b95550390e5f213fc25f33822425f7 @@ -150,6 +167,9 @@ Used in 1 buildroots: rpmID=self.getrpminfo['id']) self.session.getBuild.assert_called_once_with(self.getrpminfo['build_id']) self.assertEqual(self.session.getRPM.call_count, 2) + self.session.listRPMs.assert_called_once_with(buildID=self.getrpminfo['build_id'], + arches='src') + def test_rpminfo_without_option(self): arguments = [] @@ -167,7 +187,7 @@ Used in 1 buildroots: def test_rpminfo_help(self): self.assert_help( anon_handle_rpminfo, - """Usage: %s rpminfo [options] [ ...] + """Usage: %s rpminfo [options] [ ...] (Specify the --help global option for a list of other help options) Options: diff --git a/tests/test_cli/test_wrapper_rpm.py b/tests/test_cli/test_wrapper_rpm.py index bb40cb2..507b5d4 100644 --- a/tests/test_cli/test_wrapper_rpm.py +++ b/tests/test_cli/test_wrapper_rpm.py @@ -196,7 +196,7 @@ class TestWrapperRpm(utils.CliTestCase): @mock.patch('koji_cli.commands.activate_session') def test_handle_wrapper_rpm_argument_error( self, activate_session_mock, stderr, stdout): - """Test handle_wrapper_rpm help message output""" + """Test handle_wrapper_rpm error message output""" arguments = [] options = mock.MagicMock() @@ -219,8 +219,35 @@ class TestWrapperRpm(utils.CliTestCase): # Finally, assert that things were called as we expected. 
activate_session_mock.assert_not_called() + @mock.patch('sys.stdout', new_callable=six.StringIO) + @mock.patch('sys.stderr', new_callable=six.StringIO) + @mock.patch('koji_cli.commands.activate_session') + def test_handle_wrapper_rpm_argument_conflict_error( + self, activate_session_mock, stderr, stdout): + """Test handle_wrapper_rpm error message output""" + arguments = ['--scratch', '--create-draft', 'foo', 'n-v-r', 'scmurl'] + options = mock.MagicMock() + + # Mock out the xmlrpc server + session = mock.MagicMock() + + # Run it and check immediate output + expected = self.format_error_message( + "--scratch and --create-draft cannot be both specfied") + self.assert_system_exit( + handle_wrapper_rpm, + options, + session, + arguments, + stdout='Will create a draft build instead\n', + stderr=expected, + activate_session=None) + + # Finally, assert that things were called as we expected. + activate_session_mock.assert_not_called() + def test_handle_wrapper_rpm_help(self): - """Test handle_wrapper_rpm help message output""" + """Test handle_wrapper_rpm help message output""" self.assert_help( handle_wrapper_rpm, """Usage: %s wrapper-rpm [options] @@ -237,6 +264,7 @@ Options: --wait Wait on build, even if running in the background --nowait Don't wait on build --background Run the build at a lower priority + --create-draft Create a new draft build instead """ % self.progname) diff --git a/tests/test_hub/test_delete_build.py b/tests/test_hub/test_delete_build.py index 119a51c..9fd50ac 100644 --- a/tests/test_hub/test_delete_build.py +++ b/tests/test_hub/test_delete_build.py @@ -12,7 +12,6 @@ UP = kojihub.UpdateProcessor class TestDeleteBuild(unittest.TestCase): - def getDelete(self, *args, **kwargs): delete = DP(*args, **kwargs) delete.execute = mock.MagicMock() @@ -32,115 +31,140 @@ class TestDeleteBuild(unittest.TestCase): return update def setUp(self): - self.DeleteProcessor = mock.patch('kojihub.kojihub.DeleteProcessor', - side_effect=self.getDelete).start() + 
self.DeleteProcessor = mock.patch( + "kojihub.kojihub.DeleteProcessor", side_effect=self.getDelete + ).start() self.deletes = [] - self.QueryProcessor = mock.patch('kojihub.kojihub.QueryProcessor', - side_effect=self.getQuery).start() + self.QueryProcessor = mock.patch( + "kojihub.kojihub.QueryProcessor", side_effect=self.getQuery + ).start() self.queries = [] self.query_execute = mock.MagicMock() - self.UpdateProcessor = mock.patch('kojihub.kojihub.UpdateProcessor', - side_effect=self.getUpdate).start() + self.UpdateProcessor = mock.patch( + "kojihub.kojihub.UpdateProcessor", side_effect=self.getUpdate + ).start() self.updates = [] - self.context_db = mock.patch('kojihub.db.context').start() + self.context_db = mock.patch("kojihub.db.context").start() self.context_db.session.assertLogin = mock.MagicMock() self.context_db.event_id = 42 self.context_db.session.user_id = 24 - self.get_build = mock.patch('kojihub.kojihub.get_build').start() - self._delete_build = mock.patch('kojihub.kojihub._delete_build').start() - self.get_user = mock.patch('kojihub.kojihub.get_user').start() - self.context = mock.patch('kojihub.kojihub.context').start() + self.get_build = mock.patch("kojihub.kojihub.get_build").start() + self._delete_build = mock.patch("kojihub.kojihub._delete_build").start() + self.get_user = mock.patch("kojihub.kojihub.get_user").start() + self.context = mock.patch("kojihub.kojihub.context").start() self.context.session.assertPerm = mock.MagicMock() - self.binfo = {'id': 'BUILD ID', 'state': koji.BUILD_STATES['COMPLETE'], 'name': 'test_nvr', - 'nvr': 'test_nvr-3.3-20.el8', 'version': '3.3', 'release': '20'} + self.binfo = { + "id": "BUILD ID", + "state": koji.BUILD_STATES["COMPLETE"], + "name": "test_nvr", + "nvr": "test_nvr-3.3-20.el8", + "version": "3.3", + "release": "20", + "volume_id": 1, + "volume_name": 'testvol', + "draft": False + } def tearDown(self): mock.patch.stopall() def test_delete_build_raise_error(self): - references = ['tags', 'rpms', 
'archives', 'component_of'] + references = ["tags", "rpms", "archives", "component_of"] for ref in references: context = mock.MagicMock() context.session.return_value = context - with mock.patch('kojihub.kojihub.build_references') as refs: + with mock.patch("kojihub.kojihub.build_references") as refs: retval = defaultdict(dict) retval[ref] = True refs.return_value = retval with self.assertRaises(koji.GenericError): - kojihub.delete_build(build='', strict=True) + kojihub.delete_build(build="", strict=True) def test_delete_build_return_false(self): - references = ['tags', 'rpms', 'archives', 'component_of'] + references = ["tags", "rpms", "archives", "component_of"] for ref in references: context = mock.MagicMock() context.session.return_value = context - with mock.patch('kojihub.kojihub.build_references') as refs: + with mock.patch("kojihub.kojihub.build_references") as refs: retval = defaultdict(dict) retval[ref] = True refs.return_value = retval - assert kojihub.delete_build(build='', strict=False) is False + assert kojihub.delete_build(build="", strict=False) is False def test_delete_build_check_last_used_raise_error(self): - references = ['tags', 'rpms', 'archives', 'component_of', 'last_used'] + references = ["tags", "rpms", "archives", "component_of", "last_used"] for ref in references: context = mock.MagicMock() context.session.return_value = context - with mock.patch('kojihub.kojihub.build_references') as refs: + with mock.patch("kojihub.kojihub.build_references") as refs: retval = defaultdict(dict) - if ref == 'last_used': + if ref == "last_used": retval[ref] = time.time() + 100 refs.return_value = retval - self.assertFalse(kojihub.delete_build(build='', strict=False)) + self.assertFalse(kojihub.delete_build(build="", strict=False)) - @mock.patch('kojihub.kojihub.build_references') + @mock.patch("kojihub.kojihub.build_references") def test_delete_build_lazy_refs(self, buildrefs): - '''Test that we can handle lazy return from build_references''' - 
self.get_user.return_value = {'authtype': 2, 'id': 1, 'krb_principal': None, - 'krb_principals': [], 'name': 'kojiadmin', 'status': 0, - 'usertype': 0} - buildrefs.return_value = {'tags': []} + """Test that we can handle lazy return from build_references""" + self.get_user.return_value = { + "authtype": 2, + "id": 1, + "krb_principal": None, + "krb_principals": [], + "name": "kojiadmin", + "status": 0, + "usertype": 0, + } + buildrefs.return_value = {"tags": []} self.get_build.return_value = self.binfo kojihub.delete_build(build=self.binfo, strict=True) # no build refs, so we should have called _delete_build self._delete_build.assert_called_with(self.binfo) - def test_delete_build_queries(self): - self.query_execute.return_value = [(123, )] + @mock.patch("os.unlink") + @mock.patch("koji.util.rmtree") + def test_delete_build_queries(self, rmtree, unlink): + self.query_execute.side_effect = [ + [(123,)], # rpm ids + {'id': 0, 'name': 'DEFAULT'}, # volume DEFAULT + [{'id': 0, 'name': 'DEFAULT'}, + {'id': 1, 'name': 'testvol'}, + {'id': 2, 'name': 'other'}] # list_volumes() + ] kojihub._delete_build(self.binfo) - self.assertEqual(len(self.queries), 1) + self.assertEqual(len(self.queries), 3) query = self.queries[0] - self.assertEqual(query.tables, ['rpminfo']) + self.assertEqual(query.tables, ["rpminfo"]) self.assertEqual(query.joins, None) - self.assertEqual(query.clauses, ['build_id=%(build_id)i']) - self.assertEqual(query.columns, ['id']) + self.assertEqual(query.clauses, ["build_id=%(build_id)i"]) + self.assertEqual(query.columns, ["id"]) self.assertEqual(len(self.deletes), 2) delete = self.deletes[0] - self.assertEqual(delete.table, 'rpmsigs') + self.assertEqual(delete.table, "rpmsigs") self.assertEqual(delete.clauses, ["rpm_id=%(rpm_id)i"]) delete = self.deletes[1] - self.assertEqual(delete.table, 'rpm_checksum') + self.assertEqual(delete.table, "rpm_checksum") self.assertEqual(delete.clauses, ["rpm_id=%(rpm_id)i"]) self.assertEqual(len(self.updates), 2) update = 
self.updates[0] - self.assertEqual(update.table, 'tag_listing') - self.assertEqual(update.values, {'build_id': self.binfo['id']}) - self.assertEqual(update.data, {'revoke_event': 42, 'revoker_id': 24}) - self.assertEqual(update.rawdata, {'active': 'NULL'}) - self.assertEqual(update.clauses, ["build_id=%(build_id)i", 'active = TRUE']) + self.assertEqual(update.table, "tag_listing") + self.assertEqual(update.values, {"build_id": self.binfo["id"]}) + self.assertEqual(update.data, {"revoke_event": 42, "revoker_id": 24}) + self.assertEqual(update.rawdata, {"active": "NULL"}) + self.assertEqual(update.clauses, ["build_id=%(build_id)i", "active = TRUE"]) update = self.updates[1] - self.assertEqual(update.table, 'build') - self.assertEqual(update.values, {'build_id': self.binfo['id']}) - self.assertEqual(update.data, {'state': 2}) + self.assertEqual(update.table, "build") + self.assertEqual(update.values, {"build_id": self.binfo["id"]}) + self.assertEqual(update.data, {"state": 2}) self.assertEqual(update.rawdata, {}) - self.assertEqual(update.clauses, ['id=%(build_id)i']) - + self.assertEqual(update.clauses, ["id=%(build_id)i"]) diff --git a/tests/test_hub/test_getRPM.py b/tests/test_hub/test_getRPM.py index 984c4e0..f9ab508 100644 --- a/tests/test_hub/test_getRPM.py +++ b/tests/test_hub/test_getRPM.py @@ -16,6 +16,10 @@ class TestGetRPM(DBQueryTestCase): self.exports = kojihub.RootExports() self.context = mock.patch('kojihub.kojihub.context').start() self.get_external_repo_id = mock.patch('kojihub.kojihub.get_external_repo_id').start() + self.find_build_id = mock.patch('kojihub.kojihub.find_build_id').start() + + def tearDown(self): + mock.patch.stopall() def test_wrong_type_rpminfo(self): rpminfo = ['test-user'] @@ -25,19 +29,18 @@ class TestGetRPM(DBQueryTestCase): def test_rpm_info_int(self): rpminfo = 123 - self.qp_execute_one_return_value = {'rpminfo.id': 123} + self.qp_execute_return_value = [{'rpminfo.id': 123}] result = kojihub.get_rpm(rpminfo) 
self.assertEqual(result, {'rpminfo.id': 123}) self.assertEqual(len(self.queries), 1) query = self.queries[0] - str(query) self.assertEqual(query.tables, ['rpminfo']) columns = ['rpminfo.id', 'build_id', 'buildroot_id', 'rpminfo.name', 'version', 'release', - 'epoch', 'arch', 'external_repo_id', 'external_repo.name', 'payloadhash', - 'size', 'buildtime', 'metadata_only', 'extra'] + 'epoch', 'arch', 'draft', 'external_repo_id', 'external_repo.name', + 'payloadhash', 'size', 'buildtime', 'metadata_only', 'extra'] self.assertEqual(set(query.columns), set(columns)) - self.assertEqual(query.clauses, ['external_repo_id = 0', "rpminfo.id=%(id)s"]) + self.assertEqual(query.clauses, ["rpminfo.id=%(id)s"]) self.assertEqual(query.joins, ['external_repo ON rpminfo.external_repo_id = external_repo.id']) self.assertEqual(query.values, {'id': rpminfo}) @@ -50,11 +53,10 @@ class TestGetRPM(DBQueryTestCase): self.assertEqual(len(self.queries), 1) query = self.queries[0] - str(query) self.assertEqual(query.tables, ['rpminfo']) columns = ['rpminfo.id', 'build_id', 'buildroot_id', 'rpminfo.name', 'version', 'release', - 'epoch', 'arch', 'external_repo_id', 'external_repo.name', 'payloadhash', - 'size', 'buildtime', 'metadata_only', 'extra'] + 'epoch', 'arch', 'draft', 'external_repo_id', 'external_repo.name', + 'payloadhash', 'size', 'buildtime', 'metadata_only', 'extra'] self.assertEqual(set(query.columns), set(columns)) self.assertEqual(query.clauses, ["rpminfo.id=%(id)s"]) self.assertEqual(query.joins, @@ -70,11 +72,10 @@ class TestGetRPM(DBQueryTestCase): self.assertEqual(len(self.queries), 1) query = self.queries[0] - str(query) self.assertEqual(query.tables, ['rpminfo']) columns = ['rpminfo.id', 'build_id', 'buildroot_id', 'rpminfo.name', 'version', 'release', - 'epoch', 'arch', 'external_repo_id', 'external_repo.name', 'payloadhash', - 'size', 'buildtime', 'metadata_only', 'extra'] + 'epoch', 'arch', 'draft', 'external_repo_id', 'external_repo.name', + 'payloadhash', 'size', 
'buildtime', 'metadata_only', 'extra'] self.assertEqual(set(query.columns), set(columns)) self.assertEqual(query.clauses, ["rpminfo.id=%(id)s"]) self.assertEqual(query.joins, @@ -87,11 +88,10 @@ class TestGetRPM(DBQueryTestCase): self.assertEqual(len(self.queries), 1) query = self.queries[0] - str(query) self.assertEqual(query.tables, ['rpminfo']) columns = ['rpminfo.id', 'build_id', 'buildroot_id', 'rpminfo.name', 'version', 'release', - 'epoch', 'arch', 'external_repo_id', 'external_repo.name', 'payloadhash', - 'size', 'buildtime', 'metadata_only', 'extra'] + 'epoch', 'arch', 'draft', 'external_repo_id', 'external_repo.name', + 'payloadhash', 'size', 'buildtime', 'metadata_only', 'extra'] self.assertEqual(set(query.columns), set(columns)) self.assertEqual(query.clauses, ["rpminfo.name=%(name)s AND version=%(version)s " "AND release=%(release)s AND arch=%(arch)s"]) @@ -110,18 +110,56 @@ class TestGetRPM(DBQueryTestCase): self.assertEqual(len(self.queries), 1) query = self.queries[0] - str(query) self.assertEqual(query.tables, ['rpminfo']) columns = ['rpminfo.id', 'build_id', 'buildroot_id', 'rpminfo.name', 'version', 'release', - 'epoch', 'arch', 'external_repo_id', 'external_repo.name', 'payloadhash', - 'size', 'buildtime', 'metadata_only', 'extra'] + 'epoch', 'arch', 'draft', 'external_repo_id', 'external_repo.name', + 'payloadhash', 'size', 'buildtime', 'metadata_only', 'extra'] self.assertEqual(set(query.columns), set(columns)) self.assertEqual(query.clauses, - ["external_repo_id = %(external_repo_id)i", "rpminfo.id=%(id)s"]) + ["external_repo_id = %(external_repo_id)s", "rpminfo.id=%(id)s"]) self.assertEqual(query.joins, ['external_repo ON rpminfo.external_repo_id = external_repo.id']) self.assertEqual(query.values, rpminfo_data) + @mock.patch('kojihub.kojihub._get_rpms') + def test_rpm_info_preferred(self, _get_rpms): + rpminfo = 'testrpm-1.23-4.x86_64.rpm' + rpm_ext = {'id': 1, 'external_repo_id': 2, 'draft': False} + rpm_ext2 = {'id': 2, 
'external_repo_id': 2, 'draft': False} + rpm_draft = {'id': 3, 'external_repo_id': 0, 'draft': True} + rpm_draft2 = {'id': 4, 'external_repo_id': 0, 'draft': True} + rpm_nondraft = {'id': 5, 'external_repo_id': 0, 'draft': False} + + # this test checks that the expected preferences are followed for multiple matches + + _get_rpms.return_value = [rpm_ext, rpm_ext2, rpm_draft, rpm_draft2, rpm_nondraft] + ret = kojihub.get_rpm(rpminfo, multi=False) + self.assertEqual(ret, rpm_nondraft) + + _get_rpms.return_value = [rpm_ext, rpm_ext2, rpm_draft, rpm_draft2] + ret = kojihub.get_rpm(rpminfo, multi=False) + self.assertEqual(ret, rpm_draft2) + + _get_rpms.return_value = [rpm_ext, rpm_ext2, rpm_draft] + ret = kojihub.get_rpm(rpminfo, multi=False) + self.assertEqual(ret, rpm_draft) + + _get_rpms.return_value = [rpm_ext, rpm_ext2] + ret = kojihub.get_rpm(rpminfo, multi=False) + self.assertEqual(ret, rpm_ext2) + + _get_rpms.return_value = [rpm_ext] + ret = kojihub.get_rpm(rpminfo, multi=False) + self.assertEqual(ret, rpm_ext) + + # multiple nondraft matches should error + rpm_bad = {'id': 6, 'external_repo_id': 0, 'draft': False} + _get_rpms.return_value = [rpm_nondraft, rpm_bad] + with self.assertRaises(koji.GenericError): + ret = kojihub.get_rpm(rpminfo, multi=False) + + self.assertEqual(len(self.queries), 0) # _get_rpm is mocked + class TestGetRPMHeaders(unittest.TestCase): diff --git a/tests/test_hub/test_get_build.py b/tests/test_hub/test_get_build.py index 78c4ef5..32f0ecf 100644 --- a/tests/test_hub/test_get_build.py +++ b/tests/test_hub/test_get_build.py @@ -31,21 +31,7 @@ class TestGetBuild(DBQueryTestCase): self.assertEqual(len(self.queries), 1) query = self.queries[0] self.assertEqual(query.tables, ['build']) - self.assertEqual(query.joins, ['events ON build.create_event = events.id', - 'package on build.pkg_id = package.id', - 'volume on build.volume_id = volume.id', - 'users on build.owner = users.id']) self.assertEqual(query.clauses, ['build.id = %(buildID)i']) - 
self.assertEqual(query.columns, - ['build.id', 'build.cg_id', 'build.completion_time', - "date_part('epoch', build.completion_time)", 'events.id', 'events.time', - "date_part('epoch', events.time)", 'build.epoch', 'build.extra', - 'build.id', 'package.name', - "package.name || '-' || build.version || '-' || build.release", - 'users.id', 'users.name', 'package.id', 'package.name', 'build.release', - 'build.source', 'build.start_time', - "date_part('epoch', build.start_time)", 'build.state', 'build.task_id', - 'build.version', 'volume.id', 'volume.name']) def test_non_exist_build_int_without_result_without_strict(self): build = 11 @@ -56,21 +42,7 @@ class TestGetBuild(DBQueryTestCase): self.assertEqual(len(self.queries), 1) query = self.queries[0] self.assertEqual(query.tables, ['build']) - self.assertEqual(query.joins, ['events ON build.create_event = events.id', - 'package on build.pkg_id = package.id', - 'volume on build.volume_id = volume.id', - 'users on build.owner = users.id']) self.assertEqual(query.clauses, ['build.id = %(buildID)i']) - self.assertEqual(query.columns, - ['build.id', 'build.cg_id', 'build.completion_time', - "date_part('epoch', build.completion_time)", 'events.id', 'events.time', - "date_part('epoch', events.time)", 'build.epoch', 'build.extra', - 'build.id', 'package.name', - "package.name || '-' || build.version || '-' || build.release", - 'users.id', 'users.name', 'package.id', 'package.name', 'build.release', - 'build.source', 'build.start_time', - "date_part('epoch', build.start_time)", 'build.state', 'build.task_id', - 'build.version', 'volume.id', 'volume.name']) def test_non_exist_build_dict_with_strict(self): build = { @@ -103,21 +75,7 @@ class TestGetBuild(DBQueryTestCase): self.assertEqual(len(self.queries), 1) query = self.queries[0] self.assertEqual(query.tables, ['build']) - self.assertEqual(query.joins, ['events ON build.create_event = events.id', - 'package on build.pkg_id = package.id', - 'volume on build.volume_id = 
volume.id', - 'users on build.owner = users.id']) self.assertEqual(query.clauses, ['build.id = %(buildID)i']) - self.assertEqual(query.columns, - ['build.id', 'build.cg_id', 'build.completion_time', - "date_part('epoch', build.completion_time)", 'events.id', 'events.time', - "date_part('epoch', events.time)", 'build.epoch', 'build.extra', - 'build.id', 'package.name', - "package.name || '-' || build.version || '-' || build.release", - 'users.id', 'users.name', 'package.id', 'package.name', 'build.release', - 'build.source', 'build.start_time', - "date_part('epoch', build.start_time)", 'build.state', 'build.task_id', - 'build.version', 'volume.id', 'volume.name']) def test_result_with_cg_id(self): build = 11 @@ -130,18 +88,4 @@ class TestGetBuild(DBQueryTestCase): self.assertEqual(len(self.queries), 1) query = self.queries[0] self.assertEqual(query.tables, ['build']) - self.assertEqual(query.joins, ['events ON build.create_event = events.id', - 'package on build.pkg_id = package.id', - 'volume on build.volume_id = volume.id', - 'users on build.owner = users.id']) self.assertEqual(query.clauses, ['build.id = %(buildID)i']) - self.assertEqual(query.columns, - ['build.id', 'build.cg_id', 'build.completion_time', - "date_part('epoch', build.completion_time)", 'events.id', 'events.time', - "date_part('epoch', events.time)", 'build.epoch', 'build.extra', - 'build.id', 'package.name', - "package.name || '-' || build.version || '-' || build.release", - 'users.id', 'users.name', 'package.id', 'package.name', 'build.release', - 'build.source', 'build.start_time', - "date_part('epoch', build.start_time)", 'build.state', 'build.task_id', - 'build.version', 'volume.id', 'volume.name']) diff --git a/tests/test_hub/test_get_next_release.py b/tests/test_hub/test_get_next_release.py index 1ca0962..457e179 100644 --- a/tests/test_hub/test_get_next_release.py +++ b/tests/test_hub/test_get_next_release.py @@ -22,7 +22,8 @@ class TestGetNextRelease(DBQueryTestCase): 
self.assertEqual(query.tables, ['build']) self.assertEqual(query.joins, ['package ON build.pkg_id = package.id']) self.assertEqual(query.clauses, - ['name = %(name)s', 'state in %(states)s', 'version = %(version)s']) + ['NOT draft', 'name = %(name)s', 'state in %(states)s', + 'version = %(version)s']) self.assertEqual(query.values, {'name': self.binfo['name'], 'version': self.binfo['version'], 'states': (1, 2, 0) diff --git a/tests/test_hub/test_import_build.py b/tests/test_hub/test_import_build.py index 564075b..ddef0e9 100644 --- a/tests/test_hub/test_import_build.py +++ b/tests/test_hub/test_import_build.py @@ -82,6 +82,7 @@ class TestImportBuild(unittest.TestCase): 'version': 'version', 'release': 'release', 'id': 12345, + 'draft': False } # get_build called once to check for existing, # if it doesn't exist, called another time after creating @@ -92,6 +93,7 @@ class TestImportBuild(unittest.TestCase): fields = [ 'completion_time', + 'draft', 'epoch', 'extra', 'id', @@ -123,6 +125,7 @@ class TestImportBuild(unittest.TestCase): 'release': 'release', 'pkg_id': mock.ANY, 'id': mock.ANY, + 'draft': False } self._dml.assert_called_once_with(statement, values) diff --git a/tests/test_hub/test_import_rpm.py b/tests/test_hub/test_import_rpm.py index 014bdbb..cfc3326 100644 --- a/tests/test_hub/test_import_rpm.py +++ b/tests/test_hub/test_import_rpm.py @@ -42,11 +42,12 @@ class TestImportRPM(unittest.TestCase): 1003: 'epoch', 1006: 'buildtime', 1022: 'arch', - 1044: 'name-version-release.arch', + 1044: 'name-version-release.src.rpm', 1106: 'sourcepackage', 261: 'payload hash', } self.get_build = mock.patch('kojihub.kojihub.get_build').start() + self.new_build = mock.patch('kojihub.kojihub.new_build').start() self.get_rpm_header = mock.patch('koji.get_rpm_header').start() self.new_typed_build = mock.patch('kojihub.kojihub.new_typed_build').start() self.nextval = mock.patch('kojihub.kojihub.nextval').start() @@ -65,6 +66,7 @@ class TestImportRPM(unittest.TestCase): 
kojihub.import_rpm("this does not exist") def test_import_rpm_failed_build(self): + self.os_path_basename.return_value = 'name-version-release.arch.rpm' self.get_rpm_header.return_value = self.rpm_header_retval self.get_build.return_value = { 'state': koji.BUILD_STATES['FAILED'], @@ -72,8 +74,9 @@ class TestImportRPM(unittest.TestCase): 'version': 'version', 'release': 'release', } - with self.assertRaises(koji.GenericError): + with self.assertRaises(koji.GenericError) as cm: kojihub.import_rpm(self.filename) + self.assertEqual("Build is FAILED: name-version-release", str(cm.exception)) self.assertEqual(len(self.inserts), 0) def test_import_rpm_completed_build(self): @@ -94,6 +97,7 @@ class TestImportRPM(unittest.TestCase): 'name': 'name', 'arch': 'arch', 'buildtime': 'buildtime', + 'draft': False, 'payloadhash': '7061796c6f61642068617368', 'epoch': 'epoch', 'version': 'version', @@ -114,7 +118,7 @@ class TestImportRPM(unittest.TestCase): retval = copy.copy(self.rpm_header_retval) retval.update({ 'filename': 'name-version-release.arch.rpm', - 1044: 'name-version-release.src', + 1044: 'name-version-release.src.rpm.bad', 1022: 'src', 1106: 1, }) @@ -133,6 +137,7 @@ class TestImportRPM(unittest.TestCase): 'name': 'name', 'arch': 'src', 'buildtime': 'buildtime', + 'draft': False, 'payloadhash': '7061796c6f61642068617368', 'epoch': 'epoch', 'version': 'version', @@ -149,10 +154,9 @@ class TestImportRPM(unittest.TestCase): self.assertEqual(insert.rawdata, {}) def test_non_exist_file(self): - basename = 'rpm-1-34' self.os_path_exists.return_value = False with self.assertRaises(koji.GenericError) as cm: - kojihub.import_rpm(self.filename, basename) + kojihub.import_rpm(self.filename) self.assertEqual(f"No such file: {self.filename}", str(cm.exception)) self.assertEqual(len(self.inserts), 0) @@ -172,3 +176,139 @@ class TestImportRPM(unittest.TestCase): kojihub.import_rpm(self.src_filename) self.assertEqual("No such build", str(cm.exception)) 
self.assertEqual(len(self.inserts), 0) + + def test_import_draft_rpm_completed_build(self): + self.os_path_basename.return_value = 'name-version-release.arch.rpm' + self.get_rpm_header.return_value = self.rpm_header_retval + self.get_build.return_value = { + 'state': koji.BUILD_STATES['COMPLETE'], + 'name': 'name', + 'version': 'version', + 'release': 'release', + 'id': 12345, + } + self.nextval.return_value = 9876 + kojihub.import_rpm(self.filename) + + data = { + 'build_id': 12345, + 'name': 'name', + 'arch': 'arch', + 'buildtime': 'buildtime', + 'draft': False, + 'payloadhash': '7061796c6f61642068617368', + 'epoch': 'epoch', + 'version': 'version', + 'buildroot_id': None, + 'release': 'release', + 'external_repo_id': 0, + 'id': 9876, + 'size': 0, + } + self.assertEqual(len(self.inserts), 1) + insert = self.inserts[0] + self.assertEqual(insert.table, 'rpminfo') + self.assertEqual(insert.data, data) + self.assertEqual(insert.rawdata, {}) + + def test_import_draft_rpm_invalid_release(self): + self.os_path_basename.return_value = 'name-version-release.arch.rpm' + self.get_rpm_header.return_value = self.rpm_header_retval + + buildinfo = { + 'state': koji.BUILD_STATES['DELETED'], + 'name': 'name', + 'version': 'version', + 'release': 'badrelease', + 'id': 12345, + 'draft': True + } + + with self.assertRaises(koji.GenericError) as cm: + kojihub.import_rpm(self.filename, buildinfo=buildinfo) + self.assertEqual( + 'draft release: badrelease is not in valid format', + str(cm.exception) + ) + self.assertEqual(len(self.inserts), 0) + + def test_import_draft_rpm_valid(self): + self.os_path_basename.return_value = 'name-version-release.arch.rpm' + self.get_rpm_header.return_value = self.rpm_header_retval + + buildinfo = { + 'state': koji.BUILD_STATES['COMPLETE'], + 'name': 'name', + 'version': 'version', + 'release': 'release,draft_12345', + 'id': 12345, + 'draft': True, + 'extra': { + 'draft': { + 'target_release': 'release' + } + } + } + self.nextval.return_value = 9876 + 
kojihub.import_rpm(self.filename, buildinfo=buildinfo) + data = { + 'build_id': 12345, + 'name': 'name', + 'arch': 'arch', + 'buildtime': 'buildtime', + 'draft': True, + 'payloadhash': '7061796c6f61642068617368', + 'epoch': 'epoch', + 'version': 'version', + 'buildroot_id': None, + 'release': 'release', + 'external_repo_id': 0, + 'id': 9876, + 'size': 0, + } + self.assertEqual(len(self.inserts), 1) + insert = self.inserts[0] + self.assertEqual(insert.table, 'rpminfo') + self.assertEqual(insert.data, data) + self.assertEqual(insert.rawdata, {}) + + def test_import_draft_srpm_with_buildinfo(self): + self.os_path_basename.return_value = 'name-version-release.src.rpm' + retval = copy.copy(self.rpm_header_retval) + retval.update({ + 'filename': 'name-version-release.src.rpm', + 1044: 'name-version-release.src.rpm.bad', + 1022: 'src', + 1106: 1, + }) + self.get_rpm_header.return_value = retval + buildinfo = { + 'state': koji.BUILD_STATES['COMPLETE'], + 'name': 'name', + 'version': 'version', + 'release': 'release,draft_12345', + 'id': 12345, + 'draft': True + } + self.nextval.return_value = 9876 + kojihub.import_rpm(self.src_filename, buildinfo=buildinfo) + data = { + 'build_id': 12345, + 'name': 'name', + 'arch': 'src', + 'buildtime': 'buildtime', + 'draft': True, + 'payloadhash': '7061796c6f61642068617368', + 'epoch': 'epoch', + 'version': 'version', + 'buildroot_id': None, + 'release': 'release', + 'external_repo_id': 0, + 'id': 9876, + 'size': 0, + } + self.assertEqual(len(self.inserts), 1) + insert = self.inserts[0] + self.assertEqual(insert.table, 'rpminfo') + self.assertEqual(insert.data, data) + self.assertEqual(insert.rawdata, {}) diff --git a/tests/test_hub/test_list_builds.py b/tests/test_hub/test_list_builds.py index b54015a..239a1dc 100644 --- a/tests/test_hub/test_list_builds.py +++ b/tests/test_hub/test_list_builds.py @@ -17,6 +17,48 @@ class TestListBuilds(unittest.TestCase): return query def setUp(self): + # defaults + self.tables= ['build'] + # note: 
QueryProcessor reports these sorted by alias + self.columns = [ + 'build.id', + 'build.completion_time', + "date_part('epoch', build.completion_time)", + 'events.id', + 'events.time', + "date_part('epoch', events.time)", + 'build.draft', + 'build.epoch', + 'build.extra', + 'package.name', + "package.name || '-' || build.version || '-' || build.release", + 'users.id', + 'users.name', + 'package.id', + 'package.name', + 'promoter.id', + 'promoter.name', + 'build.promotion_time', + "date_part('epoch', build.promotion_time)", + 'build.release', + 'build.source', + 'build.start_time', + "date_part('epoch', build.start_time)", + 'build.state', + 'build.task_id', + 'build.version', + 'volume.id', + 'volume.name', + ] + self.clauses = ['package.id = %(packageID)i'] + self.joins = [ + 'LEFT JOIN events ON build.create_event = events.id', + 'LEFT JOIN package ON build.pkg_id = package.id', + 'LEFT JOIN volume ON build.volume_id = volume.id', + 'LEFT JOIN users ON build.owner = users.id', + 'LEFT JOIN users AS promoter ON build.promoter = promoter.id', + ] + self.maxDiff = None self.exports = kojihub.RootExports() self.query_executeOne = mock.MagicMock() @@ -41,7 +83,8 @@ class TestListBuilds(unittest.TestCase): 'task_id': 879, 'version': '11', 'volume_id': 0, - 'volume_name': 'DEFAULT'}] + 'volume_name': 'DEFAULT', + 'draft': False},] def test_wrong_package(self): package = 'test-package' @@ -58,26 +101,27 @@ class TestListBuilds(unittest.TestCase): self.assertEqual(len(self.queries), 1) args, kwargs = self.QueryProcessor.call_args qp = QP(**kwargs) - self.assertEqual(qp.tables, ['build']) - self.assertEqual(qp.columns, ['build.id', 'build.completion_time', - "date_part('epoch', build.completion_time)", - 'events.id', 'events.time', - "date_part('epoch', events.time)", 'build.epoch', - 'build.extra', 'package.name', - "package.name || '-' || build.version || '-' || " - "build.release", 'users.id', 'users.name', 'package.id', - 'package.name', 'build.release', 'build.source', 
- 'build.start_time', "date_part('epoch', build.start_time)", - 'build.state', 'build.task_id', 'build.version', - 'volume.id', 'volume.name']) - self.assertEqual(qp.clauses, ['package.id = %(packageID)i']) - self.assertEqual(qp.joins, ['LEFT JOIN events ON build.create_event = events.id', - 'LEFT JOIN package ON build.pkg_id = package.id', - 'LEFT JOIN volume ON build.volume_id = volume.id', - 'LEFT JOIN users ON build.owner = users.id']) + self.assertEqual(qp.tables, self.tables) + self.assertEqual(qp.columns, self.columns) + self.assertEqual(qp.clauses, self.clauses) + self.assertEqual(qp.joins, self.joins) def test_wrong_user(self): user = 'test-user' self.get_user.return_value = None rv = self.exports.listBuilds(userID=user) self.assertEqual(rv, []) + + def test_draft(self): + package = 'test-package' + package_id = 1 + self.get_package_id.return_value = package_id + self.query_executeOne.return_value = None + self.exports.listBuilds(packageID=package, draft=True) + self.assertEqual(len(self.queries), 1) + args, kwargs = self.QueryProcessor.call_args + qp = QP(**kwargs) + self.assertEqual(qp.tables, self.tables) + self.assertEqual(qp.columns, self.columns) + self.assertEqual(qp.clauses, ['draft IS TRUE'] + self.clauses) + self.assertEqual(qp.joins, self.joins) diff --git a/tests/test_hub/test_new_build.py b/tests/test_hub/test_new_build.py index ab4a270..5b8f6f3 100644 --- a/tests/test_hub/test_new_build.py +++ b/tests/test_hub/test_new_build.py @@ -9,20 +9,24 @@ IP = kojihub.InsertProcessor class TestNewBuild(unittest.TestCase): def setUp(self): - self.get_rpm = mock.patch('kojihub.kojihub.get_rpm').start() - self.get_external_repo_id = mock.patch('kojihub.kojihub.get_external_repo_id').start() - self.nextval = mock.patch('kojihub.kojihub.nextval').start() - self.Savepoint = mock.patch('kojihub.kojihub.Savepoint').start() - self.InsertProcessor = mock.patch('kojihub.kojihub.InsertProcessor', - side_effect=self.getInsert).start() + self.get_rpm = 
mock.patch("kojihub.kojihub.get_rpm").start() + self.get_external_repo_id = mock.patch( + "kojihub.kojihub.get_external_repo_id" + ).start() + self.nextval = mock.patch("kojihub.kojihub.nextval").start() + self.Savepoint = mock.patch("kojihub.kojihub.Savepoint").start() + self.InsertProcessor = mock.patch( + "kojihub.kojihub.InsertProcessor", side_effect=self.getInsert + ).start() self.inserts = [] self.insert_execute = mock.MagicMock() - self.lookup_package = mock.patch('kojihub.kojihub.lookup_package').start() - self.new_package = mock.patch('kojihub.kojihub.new_package').start() - self.get_user = mock.patch('kojihub.kojihub.get_user').start() - self.get_build = mock.patch('kojihub.kojihub.get_build').start() - self.recycle_build = mock.patch('kojihub.kojihub.recycle_build').start() - self.context = mock.patch('kojihub.kojihub.context').start() + self.lookup_package = mock.patch("kojihub.kojihub.lookup_package").start() + self.new_package = mock.patch("kojihub.kojihub.new_package").start() + self.get_user = mock.patch("kojihub.kojihub.get_user").start() + self.get_build = mock.patch("kojihub.kojihub.get_build").start() + self.recycle_build = mock.patch("kojihub.kojihub.recycle_build").start() + self.context = mock.patch("kojihub.kojihub.context").start() + self.find_build_id = mock.patch("kojihub.kojihub.find_build_id").start() def tearDown(self): mock.patch.stopall() @@ -37,36 +41,40 @@ class TestNewBuild(unittest.TestCase): self.get_build.return_value = None self.nextval.return_value = 65 # free build id self.new_package.return_value = 54 - self.get_user.return_value = {'id': 123} + self.get_user.return_value = {"id": 123} data = { - 'name': 'test_name', - 'version': 'test_version', - 'release': 'test_release', - 'epoch': 'test_epoch', - 'owner': 'test_owner', - 'extra': {'extra_key': 'extra_value'}, + "name": "test_name", + "version": "test_version", + "release": "test_release", + "epoch": "test_epoch", + "owner": "test_owner", + "extra": {"extra_key": 
"extra_value"}, } kojihub.new_build(data) self.assertEqual(len(self.inserts), 1) insert = self.inserts[0] - self.assertEqual(insert.table, 'build') - self.assertEqual(insert.data, { - 'completion_time': 'NOW', - 'epoch': 'test_epoch', - 'extra': '{"extra_key": "extra_value"}', - 'id': 65, - 'owner': 123, - 'pkg_id': 54, - 'release': 'test_release', - 'source': None, - 'start_time': 'NOW', - 'state': 1, - 'task_id': None, - 'version': 'test_version', - 'volume_id': 0 - }) + self.assertEqual(insert.table, "build") + self.assertEqual( + insert.data, + { + "completion_time": "NOW", + "epoch": "test_epoch", + "extra": '{"extra_key": "extra_value"}', + "id": 65, + "owner": 123, + "pkg_id": 54, + "release": "test_release", + "source": None, + "start_time": "NOW", + "state": 1, + "task_id": None, + "draft": False, + "version": "test_version", + "volume_id": 0, + }, + ) def test_empty_data(self): with self.assertRaises(koji.GenericError): @@ -76,13 +84,13 @@ class TestNewBuild(unittest.TestCase): def test_wrong_pkg_id(self): self.lookup_package.side_effect = koji.GenericError data = { - 'pkg_id': 444, - 'name': 'test_name', - 'version': 'test_version', - 'release': 'test_release', - 'epoch': 'test_epoch', - 'owner': 'test_owner', - 'extra': {'extra_key': 'extra_value'}, + "pkg_id": 444, + "name": "test_name", + "version": "test_version", + "release": "test_release", + "epoch": "test_epoch", + "owner": "test_owner", + "extra": {"extra_key": "extra_value"}, } with self.assertRaises(koji.GenericError): @@ -92,11 +100,11 @@ class TestNewBuild(unittest.TestCase): def test_missing_pkg_id_name(self): data = { - 'version': 'test_version', - 'release': 'test_release', - 'epoch': 'test_epoch', - 'owner': 'test_owner', - 'extra': {'extra_key': 'extra_value'}, + "version": "test_version", + "release": "test_release", + "epoch": "test_epoch", + "owner": "test_owner", + "extra": {"extra_key": "extra_value"}, } with self.assertRaises(koji.GenericError) as cm: @@ -108,12 +116,12 @@ class 
TestNewBuild(unittest.TestCase): def test_wrong_owner(self): self.get_user.side_effect = koji.GenericError data = { - 'owner': 123456, - 'name': 'test_name', - 'version': 'test_version', - 'release': 'test_release', - 'epoch': 'test_epoch', - 'extra': {'extra_key': 'extra_value'}, + "owner": 123456, + "name": "test_name", + "version": "test_version", + "release": "test_release", + "epoch": "test_epoch", + "extra": {"extra_key": "extra_value"}, } with self.assertRaises(koji.GenericError): @@ -123,13 +131,13 @@ class TestNewBuild(unittest.TestCase): def test_missing_vre(self): data = { - 'name': 'test_name', - 'version': 'test_version', - 'release': 'test_release', - 'epoch': 'test_epoch', + "name": "test_name", + "version": "test_version", + "release": "test_release", + "epoch": "test_epoch", } - for item in ('version', 'release', 'epoch'): + for item in ("version", "release", "epoch"): d = data.copy() del d[item] with self.assertRaises(koji.GenericError): @@ -143,16 +151,88 @@ class TestNewBuild(unittest.TestCase): pass data = { - 'owner': 123456, - 'name': 'test_name', - 'version': 'test_version', - 'release': 'test_release', - 'epoch': 'test_epoch', - 'extra': {'extra_key': CantDoJSON()}, + "owner": 123456, + "name": "test_name", + "version": "test_version", + "release": "test_release", + "epoch": "test_epoch", + "extra": {"extra_key": CantDoJSON()}, } with self.assertRaises(koji.GenericError) as cm: kojihub.new_build(data) self.assertEqual(len(self.inserts), 0) - self.assertEqual("No such build extra data: %(extra)r" % data, str(cm.exception)) + self.assertEqual( + "No such build extra data: %(extra)r" % data, str(cm.exception) + ) + + def test_draft(self): + data = { + "owner": 123456, + "name": "test_name", + "version": "test_version", + "release": "test_release", + "epoch": "test_epoch", + "draft": True, + } + insert_data = { + "completion_time": "NOW", + "epoch": "test_epoch", + "id": 108, + "extra": None, + "owner": 123, + "pkg_id": 54, + "release": 
"test_release,draft_108", + "source": None, + "start_time": "NOW", + "state": 1, + "task_id": None, + "draft": True, + "version": "test_version", + "volume_id": 0, + } + self.nextval.return_value = 108 + self.new_package.return_value = 54 + self.get_user.return_value = {"id": 123} + self.get_build.side_effect = [None, mock.ANY] + self.find_build_id.return_value = None + + kojihub.new_build(data) + + self.assertEqual(len(self.inserts), 1) + insert = self.inserts[0] + self.assertEqual(insert.table, "build") + self.assertEqual(insert.data, insert_data) + self.get_build.assert_has_calls( + [ + mock.call( + # it looks like a "draft build" because we use the "data" reference + { + "owner": 123, + "name": "test_name", + "version": "test_version", + "release": "test_release,draft_108", + "epoch": "test_epoch", + "draft": True, + "pkg_id": 54, + "extra": None, + "state": 1, + "start_time": "NOW", + "completion_time": "NOW", + "source": None, + "task_id": None, + "volume_id": 0, + "id": 108, + } + ), + mock.call(108, strict=True), + ] + ) + self.find_build_id.assert_called_once_with( + { + "name": "test_name", + "version": "test_version", + "release": "test_release,draft_108", + } + ) diff --git a/tests/test_hub/test_promote_build.py b/tests/test_hub/test_promote_build.py new file mode 100644 index 0000000..e18f0a3 --- /dev/null +++ b/tests/test_hub/test_promote_build.py @@ -0,0 +1,179 @@ +import datetime +import json +import mock +import unittest + +import koji +import kojihub + + +UP = kojihub.UpdateProcessor + + +class TestPromoteBuild(unittest.TestCase): + + def getUpdate(self, *args, **kwargs): + update = UP(*args, **kwargs) + update.execute = mock.MagicMock() + self.updates.append(update) + return update + + def setUp(self): + self.exports = kojihub.RootExports() + self.UpdateProcessor = mock.patch('kojihub.kojihub.UpdateProcessor', + side_effect=self.getUpdate).start() + self.updates = [] + self.context = mock.patch('kojihub.kojihub.context').start() + 
self.context.session.assertLogin = mock.MagicMock() + self.user = {'id': 1, 'name': 'jdoe'} + self.get_user = mock.patch('kojihub.kojihub.get_user', return_value=self.user).start() + self.get_build = mock.patch('kojihub.kojihub.get_build').start() + self.assert_policy = mock.patch('kojihub.kojihub.assert_policy').start() + self.apply_volume_policy = mock.patch('kojihub.kojihub.apply_volume_policy', + return_value=None).start() + self.safer_move = mock.patch('kojihub.kojihub.safer_move').start() + self.ensure_volume_symlink = mock.patch('kojihub.kojihub.ensure_volume_symlink').start() + self.lookup_name = mock.patch('kojihub.kojihub.lookup_name', + return_value={'id': 1, 'name': 'DEFAULT'}).start() + self.os_symlink = mock.patch('os.symlink').start() + self.list_tags = mock.patch('kojihub.kojihub.list_tags', + return_value=[{'id': 101}]).start() + self.set_tag_update = mock.patch('kojihub.kojihub.set_tag_update').start() + self._now = datetime.datetime.now() + self._datetime = mock.patch('kojihub.kojihub.datetime.datetime').start() + self.now = self._datetime.now = mock.MagicMock(return_value=self._now) + + self.draft_build = { + 'id': 1, + 'name': 'foo', + 'version': 'bar', + 'release': 'tgtrel,draft_1', + 'nvr': 'testnvr', + 'state': 1, + 'draft': True, + 'volume_id': 99, + 'volume_name': 'X', + 'task_id': 222 + } + + self.new_build = { + # no check on the info + 'id': 1, + 'name': 'foo', + 'version': 'bar', + 'release': 'tgtrel', + 'volume_name': 'X' + } + + def tearDown(self): + mock.patch.stopall() + + def test_promote_build_valid(self): + self.get_build.side_effect = [ + self.draft_build, + None, + self.new_build + ] + + ret = self.exports.promoteBuild('a-draft-build') + self.assertEqual(ret, self.new_build) + self.assertEqual(len(self.updates), 1) + update = self.updates[0] + self.assertEqual(update.table, 'build') + self.assertEqual(update.values, self.draft_build) + self.assertEqual(update.data, {'draft': False, + 'promoter': self.user['id'], + 'release': 
'tgtrel'}) + self.assertEqual(update.rawdata, {'promotion_time': 'now()'}) + self.assertEqual(update.clauses, ['id=%(id)i']) + self.apply_volume_policy.assert_called_once_with( + self.new_build, strict=False + ) + self.safer_move.assert_called_once_with( + '/mnt/koji/vol/X/packages/foo/bar/tgtrel,draft_1', + '/mnt/koji/vol/X/packages/foo/bar/tgtrel' + ) + self.os_symlink.assert_called_once_with( + '../../../../../packages/foo/bar/tgtrel', + '/mnt/koji/vol/X/packages/foo/bar/tgtrel,draft_1' + ) + + def test_promote_build_not_draft(self): + self.get_build.return_value = {'draft': False, 'nvr': 'testnvr'} + + with self.assertRaises(koji.GenericError) as cm: + self.exports.promoteBuild('a-regular-build') + self.assertEqual( + str(cm.exception), + "Cannot promote build testnvr. Reason: Not a draft build" + ) + self.assertEqual(len(self.updates), 0) + + def test_promote_build_target_release(self): + draft = { + 'id': 1, + 'name': 'foo', + 'version': 'bar', + # bad delimiter + 'release': 'tgtrel@draft_1', + 'nvr': 'testnvr', + 'state': 1, + 'draft': True, + 'volume_id': 99, + 'volume_name': 'X', + 'task_id': 222 + } + + self.get_build.return_value = draft + + with self.assertRaises(koji.GenericError) as cm: + self.exports.promoteBuild('a-regular-build') + self.assertEqual( + str(cm.exception), + "draft release: tgtrel@draft_1 is not in valid format" + ) + self.assertEqual(len(self.updates), 0) + + def test_promote_build_not_completed(self): + draft = { + 'id': 1, + 'name': 'foo', + 'version': 'bar', + 'release': 'tgtrel#draft_1', + 'nvr': 'testnvr', + 'draft': True, + 'state': 0, + 'volume_id': 99, + 'volume_name': 'X', + 'task_id': 222 + } + + self.get_build.return_value = draft + + with self.assertRaises(koji.GenericError) as cm: + self.exports.promoteBuild('a-regular-build') + self.assertEqual( + str(cm.exception), + f"Cannot promote build {draft['nvr']}. Reason: state (BUILDING) is not COMPLETE." 
+ ) + self.assertEqual(len(self.updates), 0) + + def test_promote_build_target_build_exists(self): + old = { + 'id': 'any', + 'nvr': 'oldnvr' + } + self.get_build.side_effect = [self.draft_build, old] + + with self.assertRaises(koji.GenericError) as cm: + self.exports.promoteBuild('a-regular-build') + self.assertEqual( + str(cm.exception), + "Cannot promote build testnvr. Reason: Target build exists: oldnvr(#any)" + ) + self.assertEqual(len(self.updates), 0) + self.get_build.assert_called_with({ + 'name': 'foo', + 'version': 'bar', + 'release': 'tgtrel' + }, strict=False) diff --git a/tests/test_hub/test_read_tagged_builds.py b/tests/test_hub/test_read_tagged_builds.py index 96240f6..e48df0b 100644 --- a/tests/test_hub/test_read_tagged_builds.py +++ b/tests/test_hub/test_read_tagged_builds.py @@ -6,6 +6,8 @@ import koji import kojihub import copy +from koji.util import dslice + QP = kojihub.QueryProcessor @@ -29,34 +31,32 @@ class TestReadTaggedBuilds(unittest.TestCase): self.readPackageList = mock.patch('kojihub.kojihub.readPackageList').start() self.lookup_name = mock.patch('kojihub.kojihub.lookup_name').start() self.tag_name = 'test-tag' - self.columns = ['tag.id', 'tag.name', 'build.id', 'build.version', 'build.release', - 'build.epoch', 'build.state', 'build.completion_time', 'build.start_time', - 'build.task_id', 'users.id', 'users.name', 'events.id', 'events.time', - 'volume.id', 'volume.name', 'package.id', 'package.name', - 'package.name || \'-\' || build.version || \'-\' || build.release', - 'tag_listing.create_event'] - self.fields = [('tag.id', 'tag_id'), ('tag.name', 'tag_name'), ('build.id', 'id'), - ('build.id', 'build_id'), ('build.version', 'version'), - ('build.release', 'release'), ('build.epoch', 'epoch'), - ('build.state', 'state'), ('build.completion_time', 'completion_time'), - ('build.start_time', 'start_time'), ('build.task_id', 'task_id'), - ('users.id', 'owner_id'), ('users.name', 'owner_name'), - ('events.id', 'creation_event_id'), 
('events.time', 'creation_time'), - ('volume.id', 'volume_id'), ('volume.name', 'volume_name'), - ('package.id', 'package_id'), ('package.name', 'package_name'), - ('package.name', 'name'), - ("package.name || '-' || build.version || '-' || build.release", 'nvr'), - ('tag_listing.create_event', 'create_event')] + self.columns = ['build.id', 'build.completion_time', 'tag_listing.create_event', + 'events.id', 'events.time', 'build.draft', 'build.epoch', 'build.id', + 'package.name', + "package.name || '-' || build.version || '-' || build.release", + 'users.id', 'users.name', 'package.id', 'package.name', 'promoter.id', + 'promoter.name', 'build.promotion_time', 'build.release', + 'build.start_time', 'build.state', 'tag.id', 'tag.name', 'build.task_id', + 'build.version', 'volume.id', 'volume.name'] + self.values = {'owner': None, + 'package': None, + 'st_complete': 1, + 'tagid': self.tag_name, + } self.joins = ['tag ON tag.id = tag_listing.tag_id', 'build ON build.id = tag_listing.build_id', 'events ON events.id = build.create_event', 'package ON package.id = build.pkg_id', 'volume ON volume.id = build.volume_id', - 'users ON users.id = build.owner', ] - self.aliases = ['tag_id', 'tag_name', 'id', 'build_id', 'version', 'release', 'epoch', - 'state', 'completion_time', 'start_time', 'task_id', 'owner_id', - 'owner_name', 'creation_event_id', 'creation_time', 'volume_id', - 'volume_name', 'package_id', 'package_name', 'name', 'nvr', 'create_event'] + 'users ON users.id = build.owner', + 'LEFT JOIN users AS promoter ON promoter.id = build.promoter', + ] + self.aliases = ['build_id', 'completion_time', 'create_event', 'creation_event_id', + 'creation_time', 'draft', 'epoch', 'id', 'name', 'nvr', 'owner_id', + 'owner_name', 'package_id', 'package_name', 'promoter_id', + 'promoter_name', 'promotion_time', 'release', 'start_time', 'state', + 'tag_id', 'tag_name', 'task_id', 'version', 'volume_id', 'volume_name'] self.clauses = ['(tag_listing.active = TRUE)', 'tag_id = 
%(tagid)s', 'build.state = %(st_complete)i'] @@ -78,19 +78,14 @@ class TestReadTaggedBuilds(unittest.TestCase): self.assertEqual(len(self.queries), 1) query = self.queries[0] - values = {'clauses': self.clauses, 'event': None, 'extra': False, 'fields': self.fields, - 'inherit': False, 'joins': self.joins, 'latest': False, 'owner': None, - 'package': None, 'packages': self.package_list, - 'queryOpts': {'order': '-create_event'}, 'st_complete': 1, 'tables': self.tables, - 'tag': self.tag_name, 'tagid': self.tag_name, 'taglist': [self.tag_name], - 'type': None - } + values = self.values.copy() self.assertEqual(query.tables, self.tables) self.assertEqual(query.joins, self.joins) self.assertEqual(set(query.columns), set(self.columns)) self.assertEqual(set(query.aliases), set(self.aliases)) self.assertEqual(set(query.clauses), set(self.clauses)) - self.assertEqual(query.values, values) + # function passes values=locals(), so we only check the relevant values + self.assertEqual(dslice(query.values, values.keys()), values) def test_get_tagged_builds_package_owner_type_maven_extra(self): self.readPackageList.return_value = self.package_list @@ -105,27 +100,21 @@ class TestReadTaggedBuilds(unittest.TestCase): 'maven_builds.version', 'build.extra']) aliases = copy.deepcopy(self.aliases) aliases.extend(['maven_group_id', 'maven_artifact_id', 'maven_version', 'extra']) - fields = copy.deepcopy(self.fields) - fields.extend([('maven_builds.group_id', 'maven_group_id'), - ('maven_builds.artifact_id', 'maven_artifact_id'), - ('maven_builds.version', 'maven_version'), ('build.extra', 'extra')]) clauses = copy.deepcopy(self.clauses) clauses.extend(['package.name = %(package)s', 'users.name = %(owner)s']) joins = copy.deepcopy(self.joins) joins.append('maven_builds ON maven_builds.build_id = tag_listing.build_id') - values = {'clauses': clauses, 'event': None, 'extra': True, 'fields': fields, - 'inherit': False, 'joins': joins, 'latest': False, 'owner': self.username, - 'package': 
self.pkg_name, 'packages': self.package_list, - 'queryOpts': {'order': '-create_event'}, 'st_complete': 1, 'tables': self.tables, - 'tag': self.tag_name, 'tagid': self.tag_name, 'taglist': [self.tag_name], - 'type': 'maven'} + values = self.values.copy() + values['owner'] = self.username + values['package'] = self.pkg_name self.assertEqual(query.tables, self.tables) self.assertEqual(query.joins, joins) self.assertEqual(set(query.columns), set(columns)) self.assertEqual(set(query.aliases), set(aliases)) self.assertEqual(set(query.clauses), set(clauses)) - self.assertEqual(query.values, values) + # function passes values=locals(), so we only check the relevant values + self.assertEqual(dslice(query.values, values.keys()), values) def test_get_tagged_builds_type_win_latest(self): self.readPackageList.return_value = self.package_list @@ -138,23 +127,17 @@ class TestReadTaggedBuilds(unittest.TestCase): columns.append('win_builds.platform') aliases = copy.deepcopy(self.aliases) aliases.append('platform') - fields = copy.deepcopy(self.fields) - fields.append(('win_builds.platform', 'platform')) joins = copy.deepcopy(self.joins) joins.append('win_builds ON win_builds.build_id = tag_listing.build_id') - values = {'clauses': self.clauses, 'event': None, 'extra': False, 'fields': fields, - 'inherit': False, 'joins': joins, 'latest': True, 'owner': None, - 'package': None, 'packages': self.package_list, - 'queryOpts': {'order': '-create_event'}, 'st_complete': 1, 'tables': self.tables, - 'tag': self.tag_name, 'tagid': self.tag_name, 'taglist': [self.tag_name], - 'type': 'win'} + values = self.values.copy() self.assertEqual(query.tables, self.tables) self.assertEqual(query.joins, joins) self.assertEqual(set(query.columns), set(columns)) self.assertEqual(set(query.aliases), set(aliases)) self.assertEqual(set(query.clauses), set(self.clauses)) - self.assertEqual(query.values, values) + # function passes values=locals(), so we only check the relevant values + 
self.assertEqual(dslice(query.values, values.keys()), values) def test_get_tagged_builds_type_image(self): self.readPackageList.return_value = self.package_list @@ -167,23 +150,17 @@ class TestReadTaggedBuilds(unittest.TestCase): columns.append('image_builds.build_id') aliases = copy.deepcopy(self.aliases) aliases.append('build_id') - fields = copy.deepcopy(self.fields) - fields.append(('image_builds.build_id', 'build_id')) joins = copy.deepcopy(self.joins) joins.append('image_builds ON image_builds.build_id = tag_listing.build_id') - values = {'clauses': self.clauses, 'event': None, 'extra': False, 'fields': fields, - 'inherit': False, 'joins': joins, 'latest': False, 'owner': None, - 'package': None, 'packages': self.package_list, - 'queryOpts': {'order': '-create_event'}, 'st_complete': 1, 'tables': self.tables, - 'tag': self.tag_name, 'tagid': self.tag_name, 'taglist': [self.tag_name], - 'type': 'image'} + values = self.values.copy() self.assertEqual(query.tables, self.tables) self.assertEqual(query.joins, joins) self.assertEqual(set(query.columns), set(columns)) self.assertEqual(set(query.aliases), set(aliases)) self.assertEqual(set(query.clauses), set(self.clauses)) - self.assertEqual(query.values, values) + # function passes values=locals(), so we only check the relevant values + self.assertEqual(dslice(query.values, values.keys()), values) def test_get_tagged_builds_type_non_exist(self): self.readPackageList.return_value = self.package_list @@ -206,16 +183,30 @@ class TestReadTaggedBuilds(unittest.TestCase): joins = copy.deepcopy(self.joins) joins.append('build_types ON build.id = build_types.build_id AND btype_id = %(btype_id)s') - values = {'btype': typeinfo, 'btype_id': typeinfo['id'], 'clauses': self.clauses, - 'event': None, 'extra': False, 'fields': self.fields, - 'inherit': False, 'joins': joins, 'latest': False, 'owner': None, - 'package': None, 'packages': self.package_list, - 'queryOpts': {'order': '-create_event'}, 'st_complete': 1, 'tables': 
self.tables, - 'tag': self.tag_name, 'tagid': self.tag_name, 'taglist': [self.tag_name], - 'type': type} + values = self.values.copy() self.assertEqual(query.tables, self.tables) self.assertEqual(query.joins, joins) self.assertEqual(set(query.columns), set(self.columns)) self.assertEqual(set(query.aliases), set(self.aliases)) self.assertEqual(set(query.clauses), set(self.clauses)) - self.assertEqual(query.values, values) + # function passes values=locals(), so we only check the relevant values + self.assertEqual(dslice(query.values, values.keys()), values) + + def test_get_tagged_builds_draft(self): + self.readPackageList.return_value = self.package_list + kojihub.readTaggedBuilds(self.tag_name, draft=True) + + self.assertEqual(len(self.queries), 1) + query = self.queries[0] + + clauses = copy.deepcopy(self.clauses) + clauses.extend(['draft IS TRUE']) + + values = self.values.copy() + self.assertEqual(query.tables, self.tables) + self.assertEqual(query.joins, self.joins) + self.assertEqual(set(query.columns), set(self.columns)) + self.assertEqual(set(query.aliases), set(self.aliases)) + self.assertEqual(set(query.clauses), set(clauses)) + # function passes values=locals(), so we only check the relevant values + self.assertEqual(dslice(query.values, values.keys()), values) diff --git a/tests/test_hub/test_read_tagged_rpms.py b/tests/test_hub/test_read_tagged_rpms.py index 057ceb3..a39f996 100644 --- a/tests/test_hub/test_read_tagged_rpms.py +++ b/tests/test_hub/test_read_tagged_rpms.py @@ -29,12 +29,13 @@ class TestReadTaggedRPMS(unittest.TestCase): self.readTaggedBuilds = mock.patch('kojihub.kojihub.readTaggedBuilds').start() self.tag_name = 'test-tag' self.columns = ['rpminfo.name', 'rpminfo.version', 'rpminfo.release', 'rpminfo.arch', - 'rpminfo.id', 'rpminfo.epoch', 'rpminfo.payloadhash', 'rpminfo.size', - 'rpminfo.buildtime', 'rpminfo.buildroot_id', 'rpminfo.build_id', - 'rpminfo.metadata_only'] + 'rpminfo.id', 'rpminfo.epoch', 'rpminfo.draft', 
'rpminfo.payloadhash', + 'rpminfo.size', 'rpminfo.buildtime', 'rpminfo.buildroot_id', + 'rpminfo.build_id', 'rpminfo.metadata_only'] self.joins = ['tag_listing ON rpminfo.build_id = tag_listing.build_id'] - self.aliases = ['name', 'version', 'release', 'arch', 'id', 'epoch', 'payloadhash', - 'size', 'buildtime', 'buildroot_id', 'build_id', 'metadata_only'] + self.aliases = ['name', 'version', 'release', 'arch', 'id', 'epoch', 'draft', + 'payloadhash', 'size', 'buildtime', 'buildroot_id', 'build_id', + 'metadata_only'] self.clauses = ['(tag_listing.active = TRUE)', 'tag_id=%(tagid)s'] self.tables = ['rpminfo'] @@ -101,3 +102,20 @@ class TestReadTaggedRPMS(unittest.TestCase): self.assertEqual(set(query.aliases), set(aliases)) self.assertEqual(set(query.clauses), set(clauses)) self.assertEqual(query.values, values) + + def test_get_tagged_rpms_draft(self): + self.readTaggedBuilds.return_value = self.build_list + kojihub.readTaggedRPMS(self.tag_name, draft=False, extra=False) + + self.assertEqual(len(self.queries), 1) + query = self.queries[0] + + clauses = copy.deepcopy(self.clauses) + clauses.extend(['rpminfo.draft IS NOT TRUE']) + + self.assertEqual(query.tables, self.tables) + self.assertEqual(set(query.columns), set(self.columns)) + self.assertEqual(set(query.joins), set(self.joins)) + self.assertEqual(set(query.aliases), set(self.aliases)) + self.assertEqual(set(query.clauses), set(clauses)) + self.assertEqual(query.values, {}) \ No newline at end of file diff --git a/tests/test_hub/test_reset_build.py b/tests/test_hub/test_reset_build.py index 13d31b2..23a79d9 100644 --- a/tests/test_hub/test_reset_build.py +++ b/tests/test_hub/test_reset_build.py @@ -10,7 +10,6 @@ UP = kojihub.UpdateProcessor class TestResetBuild(unittest.TestCase): - def getDelete(self, *args, **kwargs): delete = DP(*args, **kwargs) delete.execute = mock.MagicMock() @@ -31,26 +30,49 @@ class TestResetBuild(unittest.TestCase): def setUp(self): self.maxDiff = None - self.DeleteProcessor = 
mock.patch('kojihub.kojihub.DeleteProcessor', - side_effect=self.getDelete).start() + self.DeleteProcessor = mock.patch( + "kojihub.kojihub.DeleteProcessor", side_effect=self.getDelete + ).start() self.deletes = [] - self.QueryProcessor = mock.patch('kojihub.kojihub.QueryProcessor', - side_effect=self.getQuery).start() + self.QueryProcessor = mock.patch( + "kojihub.kojihub.QueryProcessor", side_effect=self.getQuery + ).start() self.queries = [] self.query_execute = mock.MagicMock() - self.UpdateProcessor = mock.patch('kojihub.kojihub.UpdateProcessor', - side_effect=self.getUpdate).start() + self.UpdateProcessor = mock.patch( + "kojihub.kojihub.UpdateProcessor", side_effect=self.getUpdate + ).start() self.updates = [] - self.get_build = mock.patch('kojihub.kojihub.get_build').start() - self.context = mock.patch('kojihub.kojihub.context').start() + self.get_build = mock.patch("kojihub.kojihub.get_build").start() + self.context = mock.patch("kojihub.kojihub.context").start() self.context.session.assertPerm = mock.MagicMock() + # don't remove anything unexpected + self.rmtree = mock.patch("koji.util.rmtree").start() + self.unlink = mock.patch("os.unlink").start() self.build_id = 3 - self.binfo = {'id': 3, 'state': koji.BUILD_STATES['COMPLETE'], 'name': 'test_nvr', - 'nvr': 'test_nvr-3.3-20.el8', 'version': '3.3', 'release': '20', - 'task_id': 12, 'volume_id': 1, 'build_id': 3} - self.del_binfo = {'id': 3, 'state': koji.BUILD_STATES['CANCELED'], - 'name': 'test_nvr', 'nvr': 'test_nvr-3.3-20.el8', 'version': '3.3', - 'release': '20', 'task_id': None, 'volume_id': 0} + self.binfo = { + "id": 3, + "state": koji.BUILD_STATES["COMPLETE"], + "name": "test_nvr", + "nvr": "test_nvr-3.3-20.el8", + "version": "3.3", + "release": "20", + "task_id": 12, + "volume_id": 1, + "build_id": 3, + "draft": False + } + self.del_binfo = { + "id": 3, + "state": koji.BUILD_STATES["CANCELED"], + "name": "test_nvr", + "nvr": "test_nvr-3.3-20.el8", + "version": "3.3", + "release": "20", + 
"task_id": None, + "volume_id": 0, + "draft": False + } def tearDown(self): mock.patch.stopall() @@ -58,128 +80,135 @@ class TestResetBuild(unittest.TestCase): def test_reset_build_queries(self): self.get_build.side_effect = [self.binfo, self.del_binfo] self.query_execute.side_effect = [ - [(123, )], - [(9999,)], + [(123,)], # rpm ids + [(9999,)], # archive ids + {"id": 0, "name": "DEFAULT"}, # volume DEFAULT + [ + {"id": 0, "name": "DEFAULT"}, + {"id": 1, "name": "testvol"}, + {"id": 2, "name": "other"}, + ], # list_volumes() ] kojihub.reset_build(self.build_id) - self.assertEqual(len(self.queries), 2) + self.assertEqual(len(self.queries), 4) query = self.queries[0] - self.assertEqual(query.tables, ['rpminfo']) + self.assertEqual(query.tables, ["rpminfo"]) self.assertEqual(query.joins, None) - self.assertEqual(query.clauses, ['build_id=%(id)i']) - self.assertEqual(query.columns, ['id']) - self.assertEqual(query.values, {'id': self.binfo['build_id']}) + self.assertEqual(query.clauses, ["build_id=%(id)i"]) + self.assertEqual(query.columns, ["id"]) + self.assertEqual(query.values, {"id": self.binfo["build_id"]}) query = self.queries[1] - self.assertEqual(query.tables, ['archiveinfo']) + self.assertEqual(query.tables, ["archiveinfo"]) self.assertEqual(query.joins, None) - self.assertEqual(query.clauses, ['build_id=%(id)i']) - self.assertEqual(query.columns, ['id']) - self.assertEqual(query.values, {'id': self.binfo['build_id']}) + self.assertEqual(query.clauses, ["build_id=%(id)i"]) + self.assertEqual(query.columns, ["id"]) + self.assertEqual(query.values, {"id": self.binfo["build_id"]}) self.assertEqual(len(self.updates), 1) update = self.updates[0] - self.assertEqual(update.table, 'build') - self.assertEqual(update.values, {'id': self.binfo['id']}) - self.assertEqual(update.data, {'state': 4, 'task_id': None, 'volume_id': 0}) + self.assertEqual(update.table, "build") + self.assertEqual(update.values, {"id": self.binfo["id"]}) + self.assertEqual(update.data, 
{"state": 4, "task_id": None, "volume_id": 0}) self.assertEqual(update.rawdata, {}) - self.assertEqual(update.clauses, ['id=%(id)s']) + self.assertEqual(update.clauses, ["id=%(id)s"]) self.assertEqual(len(self.deletes), 18) delete = self.deletes[0] - self.assertEqual(delete.table, 'rpmsigs') - self.assertEqual(delete.clauses, ['rpm_id=%(rpm_id)i']) - self.assertEqual(delete.values, {'rpm_id': 123}) + self.assertEqual(delete.table, "rpmsigs") + self.assertEqual(delete.clauses, ["rpm_id=%(rpm_id)i"]) + self.assertEqual(delete.values, {"rpm_id": 123}) delete = self.deletes[1] - self.assertEqual(delete.table, 'buildroot_listing') - self.assertEqual(delete.clauses, ['rpm_id=%(rpm_id)i']) - self.assertEqual(delete.values, {'rpm_id': 123}) + self.assertEqual(delete.table, "buildroot_listing") + self.assertEqual(delete.clauses, ["rpm_id=%(rpm_id)i"]) + self.assertEqual(delete.values, {"rpm_id": 123}) delete = self.deletes[2] - self.assertEqual(delete.table, 'archive_rpm_components') - self.assertEqual(delete.clauses, ['rpm_id=%(rpm_id)i']) - self.assertEqual(delete.values, {'rpm_id': 123}) + self.assertEqual(delete.table, "archive_rpm_components") + self.assertEqual(delete.clauses, ["rpm_id=%(rpm_id)i"]) + self.assertEqual(delete.values, {"rpm_id": 123}) delete = self.deletes[3] - self.assertEqual(delete.table, 'rpm_checksum') - self.assertEqual(delete.clauses, ['rpm_id=%(rpm_id)i']) - self.assertEqual(delete.values, {'rpm_id': 123}) + self.assertEqual(delete.table, "rpm_checksum") + self.assertEqual(delete.clauses, ["rpm_id=%(rpm_id)i"]) + self.assertEqual(delete.values, {"rpm_id": 123}) delete = self.deletes[4] - self.assertEqual(delete.table, 'rpminfo') - self.assertEqual(delete.clauses, ['build_id=%(id)i']) - self.assertEqual(delete.values, {'id': self.binfo['build_id']}) + self.assertEqual(delete.table, "rpminfo") + self.assertEqual(delete.clauses, ["build_id=%(id)i"]) + self.assertEqual(delete.values, {"id": self.binfo["build_id"]}) delete = self.deletes[5] - 
self.assertEqual(delete.table, 'maven_archives') - self.assertEqual(delete.clauses, ['archive_id=%(archive_id)i']) - self.assertEqual(delete.values, {'archive_id': 9999}) + self.assertEqual(delete.table, "maven_archives") + self.assertEqual(delete.clauses, ["archive_id=%(archive_id)i"]) + self.assertEqual(delete.values, {"archive_id": 9999}) delete = self.deletes[6] - self.assertEqual(delete.table, 'win_archives') - self.assertEqual(delete.clauses, ['archive_id=%(archive_id)i']) - self.assertEqual(delete.values, {'archive_id': 9999}) + self.assertEqual(delete.table, "win_archives") + self.assertEqual(delete.clauses, ["archive_id=%(archive_id)i"]) + self.assertEqual(delete.values, {"archive_id": 9999}) delete = self.deletes[7] - self.assertEqual(delete.table, 'image_archives') - self.assertEqual(delete.clauses, ['archive_id=%(archive_id)i']) - self.assertEqual(delete.values, {'archive_id': 9999}) + self.assertEqual(delete.table, "image_archives") + self.assertEqual(delete.clauses, ["archive_id=%(archive_id)i"]) + self.assertEqual(delete.values, {"archive_id": 9999}) delete = self.deletes[8] - self.assertEqual(delete.table, 'buildroot_archives') - self.assertEqual(delete.clauses, ['archive_id=%(archive_id)i']) - self.assertEqual(delete.values, {'archive_id': 9999}) + self.assertEqual(delete.table, "buildroot_archives") + self.assertEqual(delete.clauses, ["archive_id=%(archive_id)i"]) + self.assertEqual(delete.values, {"archive_id": 9999}) delete = self.deletes[9] - self.assertEqual(delete.table, 'archive_rpm_components') - self.assertEqual(delete.clauses, ['archive_id=%(archive_id)i']) - self.assertEqual(delete.values, {'archive_id': 9999}) + self.assertEqual(delete.table, "archive_rpm_components") + self.assertEqual(delete.clauses, ["archive_id=%(archive_id)i"]) + self.assertEqual(delete.values, {"archive_id": 9999}) delete = self.deletes[10] - self.assertEqual(delete.table, 'archive_components') - self.assertEqual(delete.clauses, ['archive_id=%(archive_id)i']) - 
self.assertEqual(delete.values, {'archive_id': 9999}) + self.assertEqual(delete.table, "archive_components") + self.assertEqual(delete.clauses, ["archive_id=%(archive_id)i"]) + self.assertEqual(delete.values, {"archive_id": 9999}) delete = self.deletes[11] - self.assertEqual(delete.table, 'archive_components') - self.assertEqual(delete.clauses, ['component_id=%(archive_id)i']) - self.assertEqual(delete.values, {'archive_id': 9999}) + self.assertEqual(delete.table, "archive_components") + self.assertEqual(delete.clauses, ["component_id=%(archive_id)i"]) + self.assertEqual(delete.values, {"archive_id": 9999}) delete = self.deletes[12] - self.assertEqual(delete.table, 'archiveinfo') - self.assertEqual(delete.clauses, ['build_id=%(id)i']) - self.assertEqual(delete.values, {'id': self.binfo['build_id']}) + self.assertEqual(delete.table, "archiveinfo") + self.assertEqual(delete.clauses, ["build_id=%(id)i"]) + self.assertEqual(delete.values, {"id": self.binfo["build_id"]}) delete = self.deletes[13] - self.assertEqual(delete.table, 'maven_builds') - self.assertEqual(delete.clauses, ['build_id=%(id)i']) - self.assertEqual(delete.values, {'id': self.binfo['build_id']}) + self.assertEqual(delete.table, "maven_builds") + self.assertEqual(delete.clauses, ["build_id=%(id)i"]) + self.assertEqual(delete.values, {"id": self.binfo["build_id"]}) delete = self.deletes[14] - self.assertEqual(delete.table, 'win_builds') - self.assertEqual(delete.clauses, ['build_id=%(id)i']) - self.assertEqual(delete.values, {'id': self.binfo['build_id']}) + self.assertEqual(delete.table, "win_builds") + self.assertEqual(delete.clauses, ["build_id=%(id)i"]) + self.assertEqual(delete.values, {"id": self.binfo["build_id"]}) delete = self.deletes[15] - self.assertEqual(delete.table, 'image_builds') - self.assertEqual(delete.clauses, ['build_id=%(id)i']) - self.assertEqual(delete.values, {'id': self.binfo['build_id']}) + self.assertEqual(delete.table, "image_builds") + self.assertEqual(delete.clauses, 
["build_id=%(id)i"]) + self.assertEqual(delete.values, {"id": self.binfo["build_id"]}) delete = self.deletes[16] - self.assertEqual(delete.table, 'build_types') - self.assertEqual(delete.clauses, ['build_id=%(id)i']) - self.assertEqual(delete.values, {'id': self.binfo['build_id']}) + self.assertEqual(delete.table, "build_types") + self.assertEqual(delete.clauses, ["build_id=%(id)i"]) + self.assertEqual(delete.values, {"id": self.binfo["build_id"]}) delete = self.deletes[17] - self.assertEqual(delete.table, 'tag_listing') - self.assertEqual(delete.clauses, ['build_id=%(id)i']) - self.assertEqual(delete.values, {'id': self.binfo['build_id']}) + self.assertEqual(delete.table, "tag_listing") + self.assertEqual(delete.clauses, ["build_id=%(id)i"]) + self.assertEqual(delete.values, {"id": self.binfo["build_id"]}) - self.get_build.assert_has_calls([mock.call(self.build_id), - mock.call(self.build_id, strict=True)]) + self.get_build.assert_has_calls( + [mock.call(self.build_id), mock.call(self.build_id, strict=True)] + ) def test_reset_build_non_exist_build(self): self.get_build.return_value = None diff --git a/vm/kojikamid.py b/vm/kojikamid.py index 4f08b94..fe271e2 100755 --- a/vm/kojikamid.py +++ b/vm/kojikamid.py @@ -377,6 +377,12 @@ class WindowsBuild(object): brtype = brinfo.get('type', 'win') buildinfo = self.server.getLatestBuild(self.build_tag, buildreq, self.task_opts.get('repo_id')) + # don't allow draft build in buildrequires + # TODO: remove it when ready for win build + if buildinfo.get('draft'): + raise BuildError( # noqa: F821 + "Draft build: %s is not supported in buildrequires" % buildinfo['nvr'] + ) br_dir = os.path.join(self.buildreq_dir, buildreq, brtype) ensuredir(br_dir) # noqa: F821 brinfo['dir'] = br_dir diff --git a/www/kojiweb/buildinfo.chtml b/www/kojiweb/buildinfo.chtml index d905007..e7a45e8 100644 --- a/www/kojiweb/buildinfo.chtml +++ b/www/kojiweb/buildinfo.chtml @@ -24,6 +24,11 @@ Epoch$build.epoch + #if $build.draft + DraftTrue + #else + 
DraftFalse + #end if #if $build.get('source') Source$build['source'] @@ -92,6 +97,14 @@ Completed$util.formatTimeLong($build.completion_ts) #end if + #if $build.promotion_ts + + Promoted$util.formatTimeLong($build.promotion_ts) + + + Promoted by$util.escapeHTML($build.promoter_name) + + #end if #if $build.cg_id Content generator$util.escapeHTML($build.cg_name) diff --git a/www/kojiweb/rpminfo.chtml b/www/kojiweb/rpminfo.chtml index 79e822a..40a3eb0 100644 --- a/www/kojiweb/rpminfo.chtml +++ b/www/kojiweb/rpminfo.chtml @@ -14,6 +14,11 @@ ID$rpm.id + #if $build + + Build$koji.buildLabel($build) + + #end if #if $build Name$util.escapeHTML($rpm.name) @@ -37,6 +42,12 @@ Arch$util.escapeHTML($rpm.arch) + + #if $rpm.draft + DraftTrue + #else + DraftFalse + #end if #if $rpm.external_repo_id == 0 Summary$util.escapeHTML($summary) diff --git a/www/kojiweb/rpmlist.chtml b/www/kojiweb/rpmlist.chtml index 6ba83aa..0586275 100644 --- a/www/kojiweb/rpmlist.chtml +++ b/www/kojiweb/rpmlist.chtml @@ -65,7 +65,7 @@ colspan="2" #slurp #for $rpm in $rpms #set $epoch = ($rpm.epoch != None and $str($rpm.epoch) + ':' or '') - $util.escapeHTML($rpm.name)-$epoch$rpm.version-$rpm.release.${rpm.arch}.rpm + $util.formatRPM($rpm) #if $type in ['component', 'image'] #if $rpm.external_repo_id == 0 internal diff --git a/www/lib/kojiweb/util.py b/www/lib/kojiweb/util.py index 5720db8..014add4 100644 --- a/www/lib/kojiweb/util.py +++ b/www/lib/kojiweb/util.py @@ -568,6 +568,25 @@ def formatLink(url): return url +def formatRPM(rpminfo, link=True): + """Format an rpm dict for display""" + rpminfo = rpminfo.copy() + if rpminfo.get('epoch'): + rpminfo['epoch'] = str(rpminfo['epoch']) + ':' + else: + rpminfo['epoch'] = '' + if rpminfo.get('draft'): + rpminfo['suffix'] = f" (draft_{rpminfo.get('build_id', '???')})" + else: + rpminfo['suffix'] = '' + label = escapeHTML("%(name)s-%(epoch)s%(version)s-%(release)s.%(arch)s%(suffix)s" % rpminfo) + if link: + rpm_id = rpminfo['id'] + return f'{label}' + else: + 
return label + + def rowToggle(template): """If the value of template._rowNum is even, return 'row-even'; if it is odd, return 'row-odd'. Increment the value before checking it.