From 9a0a63739307520ff664f602b2a5fe79312e1ccd Mon Sep 17 00:00:00 2001 From: Tomas Kopecek Date: Nov 07 2017 15:11:29 +0000 Subject: python-modernize -f libmodernize.fixes.fix_dict_six . -w --- diff --git a/hub/kojihub.py b/hub/kojihub.py index f8974ce..b3a023f 100644 --- a/hub/kojihub.py +++ b/hub/kojihub.py @@ -61,6 +61,7 @@ import koji.xmlrpcplus from koji.context import context from six.moves import zip from six.moves import range +import six try: import json @@ -671,12 +672,12 @@ def _writeInheritanceData(tag_id, changes, clear=False): data[parent_id] = link break if clear: - for link in data.itervalues(): + for link in six.itervalues(data): if not link.get('is_update'): link['delete link'] = True link['is_update'] = True changed = False - for link in data.itervalues(): + for link in six.itervalues(data): if link.get('is_update'): changed = True break @@ -686,17 +687,17 @@ def _writeInheritanceData(tag_id, changes, clear=False): return #check for duplicate priorities pri_index = {} - for link in data.itervalues(): + for link in six.itervalues(data): if link.get('delete link'): continue pri_index.setdefault(link['priority'], []).append(link) - for pri, dups in pri_index.iteritems(): + for pri, dups in six.iteritems(pri_index): if len(dups) <= 1: continue #oops, duplicate entries for a single priority dup_ids = [link['parent_id'] for link in dups] raise koji.GenericError("Inheritance priorities must be unique (pri %s: %r )" % (pri, dup_ids)) - for parent_id, link in data.iteritems(): + for parent_id, link in six.iteritems(data): if not link.get('is_update'): continue # revoke old values @@ -704,7 +705,7 @@ def _writeInheritanceData(tag_id, changes, clear=False): clauses=['tag_id=%(tag_id)s', 'parent_id = %(parent_id)s']) update.make_revoke() update.execute() - for parent_id, link in data.iteritems(): + for parent_id, link in six.iteritems(data): if not link.get('is_update'): continue # skip rest if we are just deleting @@ -1973,7 +1974,7 @@ def get_tag_groups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True groups.setdefault(grp_id, group) if incl_pkgs: - for group in groups.itervalues(): + for group in six.itervalues(groups): group['packagelist'] = {} fields = ('group_id', 'tag_id', 'package', 'blocked', 'type', 'basearchonly', 'requires') q = """ @@ -1995,7 +1996,7 @@ def get_tag_groups(tag, event=None, inherit=True, incl_pkgs=True, incl_reqs=True if incl_reqs: # and now the group reqs - for group in groups.itervalues(): + for group in six.itervalues(groups): group['grouplist'] = {} fields = ('group_id', 'tag_id', 'req_id', 'blocked', 'type', 'is_metapkg', 'name') q = """SELECT %s FROM group_req_listing JOIN groups on req_id = id @@ -2161,7 +2162,7 @@ def get_all_arches(): #in a perfect world, this list would only include canonical #arches, but not all admins will undertand that. 
ret[koji.canonArch(arch)] = 1 - return ret.keys() + return list(ret.keys()) def get_active_tasks(host=None): """Return data on tasks that are yet to be run""" @@ -2414,7 +2415,7 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None): os.symlink(relpath, destlink) except: log_error('Error linking %s to %s' % (destlink, relpath)) - for artifact_dir, artifacts in artifact_dirs.iteritems(): + for artifact_dir, artifacts in six.iteritems(artifact_dirs): _write_maven_repo_metadata(artifact_dir, artifacts) koji.plugin.run_callbacks('postRepoInit', tag=tinfo, with_src=with_src, with_debuginfo=with_debuginfo, @@ -2553,7 +2554,7 @@ def repo_references(repo_id): 'host_id': 'host_id', 'create_event': 'create_event', 'state': 'state'} - fields, aliases = list(zip(*fields.items())) + fields, aliases = list(zip(*list(fields.items()))) values = {'repo_id': repo_id} clauses = ['repo_id=%(repo_id)s', 'retire_event IS NULL'] query = QueryProcessor(columns=fields, aliases=aliases, tables=['standard_buildroot'], @@ -2922,7 +2923,7 @@ def _create_tag(name, parent=None, arches=None, perm=None, locked=False, maven_s # add extra data if extra is not None: - for key, value in extra.iteritems(): + for key, value in six.iteritems(extra): data = { 'tag_id': tag_id, 'key': key, @@ -2987,7 +2988,7 @@ def get_tag(tagInfo, strict=False, event=None): raise koji.GenericError('invalid type for tagInfo: %s' % type(tagInfo)) data = {'tagInfo': tagInfo} - fields, aliases = list(zip(*fields.items())) + fields, aliases = list(zip(*list(fields.items()))) query = QueryProcessor(columns=fields, aliases=aliases, tables=tables, joins=joins, clauses=clauses, values=data) result = query.executeOne() @@ -5048,7 +5049,7 @@ def import_build(srpm, rpms, brmap=None, task_id=None, build_id=None, logs=None) policy_data = { 'package': build['name'], - 'buildroots': brmap.values(), + 'buildroots': list(brmap.values()), 'import': True, 'import_type': 'rpm', } @@ -5088,7 +5089,7 @@ def import_build(srpm, rpms, brmap=None, task_id=None, build_id=None, logs=None) import_rpm_file(fn, binfo, rpminfo) add_rpm_sig(rpminfo['id'], koji.rip_rpm_sighdr(fn)) if logs: - for key, files in logs.iteritems(): + for key, files in six.iteritems(logs): if not key: key = None for relpath in files: @@ -6818,7 +6819,7 @@ def query_history(tables=None, **kwargs): fields[r_test] = '_revoked_before_event' if skip: continue - fields, aliases = list(zip(*fields.items())) + fields, aliases = list(zip(*list(fields.items()))) query = QueryProcessor(columns=fields, aliases=aliases, tables=[table], joins=joins, clauses=clauses, values=data) ret[table] = query.iterate() @@ -6957,7 +6958,7 @@ def build_references(build_id, limit=None): idx.setdefault(row['id'], row) if limit is not None and len(idx) > limit: break - ret['rpms'] = idx.values() + ret['rpms'] = list(idx.values()) ret['component_of'] = [] # find images/archives that contain the build rpms @@ -6988,7 +6989,7 @@ def build_references(build_id, limit=None): idx.setdefault(row['id'], row) if limit is not None and len(idx) > limit: break - ret['archives'] = idx.values() + ret['archives'] = list(idx.values()) # find images/archives that contain the build archives fields = ['archive_id'] @@ -7294,7 +7295,7 @@ def get_notification_recipients(build, tag_id, state): #FIXME - if tag_id is None, we don't have a good way to get the package owner. # using all package owners from all tags would be way overkill. 
- emails_uniq = dict([(x, 1) for x in emails]).keys() + emails_uniq = list(dict([(x, 1) for x in emails]).keys()) return emails_uniq def tag_notification(is_successful, tag_id, from_id, build_id, user_id, ignore_success=False, failure_msg=''): @@ -7317,7 +7318,7 @@ def tag_notification(is_successful, tag_id, from_id, build_id, user_id, ignore_s from_tag = get_tag(from_id) for email in get_notification_recipients(build, from_tag['id'], state): recipients[email] = 1 - recipients_uniq = recipients.keys() + recipients_uniq = list(recipients.keys()) if len(recipients_uniq) > 0 and not (is_successful and ignore_success): task_id = make_task('tagNotification', [recipients_uniq, is_successful, tag_id, from_id, build_id, user_id, ignore_success, failure_msg]) return task_id @@ -7535,8 +7536,8 @@ class InsertProcessor(object): if not self.data and not self.rawdata: return "-- incomplete update: no assigns" parts = ['INSERT INTO %s ' % self.table] - columns = self.data.keys() - columns.extend(self.rawdata.keys()) + columns = list(self.data.keys()) + columns.extend(list(self.rawdata.keys())) parts.append("(%s) " % ', '.join(columns)) values = [] for key in columns: @@ -7579,7 +7580,7 @@ class InsertProcessor(object): del data['create_event'] del data['creator_id'] clauses = ["%s = %%(%s)s" % (k, k) for k in data] - query = QueryProcessor(columns=data.keys(), tables=[self.table], + query = QueryProcessor(columns=list(data.keys()), tables=[self.table], clauses=clauses, values=data) if query.execute(): return True @@ -8246,7 +8247,7 @@ class UserInGroupTest(koji.policy.BaseSimpleTest): return False groups = koji.auth.get_user_groups(user['id']) args = self.str.split()[1:] - for group_id, group in groups.iteritems(): + for group_id, group in six.iteritems(groups): for pattern in args: if fnmatch.fnmatch(group, pattern): return True @@ -10061,9 +10062,9 @@ class RootExports(object): userID = get_user(userID, strict=True)['id'] if pkgID is not None: pkgID = get_package_id(pkgID, strict=True) - result_list = readPackageList(tagID=tagID, userID=userID, pkgID=pkgID, + result_list = list(readPackageList(tagID=tagID, userID=userID, pkgID=pkgID, inherit=inherited, with_dups=with_dups, - event=event).values() + event=event).values()) if with_dups: # when with_dups=True, readPackageList returns a list of list of dicts # convert it to a list of dicts for consistency @@ -11667,7 +11668,7 @@ class HostExports(object): safer_move(fn, dest) os.symlink(dest, fn) if logs: - for key, files in logs.iteritems(): + for key, files in six.iteritems(logs): if key: logdir = "%s/logs/%s" % (dir, key) else: @@ -11690,7 +11691,7 @@ class HostExports(object): scratchdir = koji.pathinfo.scratch() username = get_user(task.getOwner())['name'] destdir = os.path.join(scratchdir, username, 'task_%s' % task_id) - for reldir, files in results['files'].items() + [('', results['logs'])]: + for reldir, files in list(results['files'].items()) + [('', results['logs'])]: for filename in files: if reldir: relpath = os.path.join(reldir, filename) @@ -11722,7 +11723,7 @@ class HostExports(object): scratchdir = koji.pathinfo.scratch() username = get_user(task.getOwner())['name'] destdir = os.path.join(scratchdir, username, 'task_%s' % task_id) - for relpath in results['output'].keys() + results['logs']: + for relpath in list(results['output'].keys()) + results['logs']: filename = os.path.join(koji.pathinfo.task(results['task_id']), relpath) dest = os.path.join(destdir, relpath) koji.ensuredir(os.path.dirname(dest)) @@ -11911,7 +11912,7 @@ class 
HostExports(object): maven_task_id = maven_results['task_id'] maven_buildroot_id = maven_results['buildroot_id'] maven_task_dir = koji.pathinfo.task(maven_task_id) - for relpath, files in maven_results['files'].iteritems(): + for relpath, files in six.iteritems(maven_results['files']): dir_maven_info = maven_info poms = [f for f in files if f.endswith('.pom')] if len(poms) == 0: @@ -12075,7 +12076,7 @@ class HostExports(object): task_dir = koji.pathinfo.task(results['task_id']) # import the build output - for relpath, metadata in results['output'].iteritems(): + for relpath, metadata in six.iteritems(results['output']): archivetype = get_archive_type(relpath) if not archivetype: # Unknown archive type, fail the build @@ -12303,7 +12304,7 @@ class HostExports(object): for dep in extra_deps: if isinstance(dep, (int, long)): task_output = list_task_output(dep, stat=True) - for filepath, filestats in task_output.iteritems(): + for filepath, filestats in six.iteritems(task_output): if os.path.splitext(filepath)[1] in ['.log', '.md5', '.sha1']: continue tokens = filepath.split('/') @@ -12336,7 +12337,7 @@ class HostExports(object): logger.error("Current build is %s, new build is %s.", idx_build, archive['build_id']) maven_build_index[archive['group_id']][archive['artifact_id']][archive['version']] = archive['build_id'] - ignore.extend(task_deps.values()) + ignore.extend(list(task_deps.values())) SNAPSHOT_RE = re.compile(r'-\d{8}\.\d{6}-\d+') ignore_by_label = {} @@ -12389,7 +12390,7 @@ class HostExports(object): if build_id: build = get_build(build_id) logger.error("g:a:v supplied by build %(nvr)s", build) - logger.error("Build supplies %i archives: %r", len(build_archives), build_archives.keys()) + logger.error("Build supplies %i archives: %r", len(build_archives), list(build_archives.keys())) if tag_archive: logger.error("Size mismatch, br: %i, db: %i", fileinfo['size'], tag_archive['size']) raise koji.BuildrootError('Unknown file in build environment: %s, size: %s' % \ @@ -12467,7 +12468,7 @@ class HostExports(object): repodir = koji.pathinfo.repo(repo_id, rinfo['tag_name']) workdir = koji.pathinfo.work() if not rinfo['dist']: - for arch, (uploadpath, files) in data.iteritems(): + for arch, (uploadpath, files) in six.iteritems(data): archdir = "%s/%s" % (repodir, koji.canonArch(arch)) if not os.path.isdir(archdir): raise koji.GenericError("Repo arch directory missing: %s" % archdir) diff --git a/hub/kojixmlrpc.py b/hub/kojixmlrpc.py index 88925a1..5aeb583 100644 --- a/hub/kojixmlrpc.py +++ b/hub/kojixmlrpc.py @@ -40,6 +40,7 @@ import koji.util from koji.xmlrpcplus import getparser, dumps, Fault, ExtendedMarshaller from koji.context import context from six.moves import range +import six class Marshaller(ExtendedMarshaller): @@ -98,7 +99,7 @@ class HandlerRegistry(object): Handlers are functions marked with one of the decorators defined in koji.plugin """ - for v in vars(plugin).itervalues(): + for v in six.itervalues(vars(plugin)): if isinstance(v, type): #skip classes continue @@ -155,7 +156,7 @@ class HandlerRegistry(object): return args def system_listMethods(self): - return self.funcs.keys() + return list(self.funcs.keys()) def system_methodSignature(self, method): #it is not possible to autogenerate this data @@ -477,7 +478,7 @@ def load_config(environ): opts['policy'] = dict(config.items('policy')) else: opts['policy'] = {} - for pname, text in _default_policies.iteritems(): + for pname, text in six.iteritems(_default_policies): opts['policy'].setdefault(pname, text) # use configured 
KojiDir if opts.get('KojiDir') is not None: @@ -545,12 +546,12 @@ def get_policy(opts, plugins): continue alltests.append(koji.policy.findSimpleTests(vars(plugin))) policy = {} - for pname, text in opts['policy'].iteritems(): + for pname, text in six.iteritems(opts['policy']): #filter/merge tests merged = {} for tests in alltests: # tests can be limited to certain policies by setting a class variable - for name, test in tests.iteritems(): + for name, test in six.iteritems(tests): if hasattr(test, 'policy'): if isinstance(test.policy, basestring): if pname != test.policy: diff --git a/koji/auth.py b/koji/auth.py index 6f43159..706f6bc 100644 --- a/koji/auth.py +++ b/koji/auth.py @@ -104,7 +104,7 @@ class Session(object): 'user_id': 'user_id', } # sort for stability (unittests) - fields, aliases = list(zip(*list(sorted(fields.items(), key=lambda x: x[1])))) + fields, aliases = list(zip(*list(sorted(list(fields.items()), key=lambda x: x[1])))) q = """ SELECT %s FROM sessions WHERE id = %%(id)i diff --git a/koji/context.py b/koji/context.py index ef35a21..707c626 100755 --- a/koji/context.py +++ b/koji/context.py @@ -27,6 +27,7 @@ from __future__ import absolute_import import six.moves._thread from six.moves import range +import six class _data(object): pass @@ -67,7 +68,7 @@ class ThreadLocal(object): id = six.moves._thread.get_ident() tdict = object.__getattribute__(self, '_tdict') return "(current thread: %s) {" % id + \ - ", ".join(["%s : %s" %(k, v.__dict__) for (k, v) in tdict.iteritems()]) + \ + ", ".join(["%s : %s" %(k, v.__dict__) for (k, v) in six.iteritems(tdict)]) + \ "}" def _threadclear(self): diff --git a/koji/daemon.py b/koji/daemon.py index dda3b0a..c41d2f2 100644 --- a/koji/daemon.py +++ b/koji/daemon.py @@ -37,6 +37,7 @@ import sys import traceback import errno from six.moves import range +import six def incremental_upload(session, fname, fd, path, retries=5, logger=None): @@ -557,7 +558,7 @@ class TaskManager(object): """Attempt to shut down cleanly""" for task_id in self.pids.keys(): self.cleanupTask(task_id) - self.session.host.freeTasks(self.tasks.keys()) + self.session.host.freeTasks(list(self.tasks.keys())) self.session.host.updateHost(task_load=0.0, ready=False) def updateBuildroots(self, nolocal=False): @@ -588,14 +589,14 @@ class TaskManager(object): #task not running - expire the buildroot #TODO - consider recycling hooks here (with strong sanity checks) self.logger.info("Expiring buildroot: %(id)i/%(tag_name)s/%(arch)s" % br) - self.logger.debug("Buildroot task: %r, Current tasks: %r" % (task_id, self.tasks.keys())) + self.logger.debug("Buildroot task: %r, Current tasks: %r" % (task_id, list(self.tasks.keys()))) self.session.host.setBuildRootState(id, st_expired) continue if nolocal: return local_br = self._scanLocalBuildroots() # get info on local_only buildroots (most likely expired) - local_only = [id for id in local_br.iterkeys() if id not in db_br] + local_only = [id for id in six.iterkeys(local_br) if id not in db_br] if local_only: missed_br = self.session.listBuildroots(buildrootID=tuple(local_only)) #get all the task info in one call @@ -827,7 +828,7 @@ class TaskManager(object): # Note: we may still take an assigned task below #sort available capacities for each of our bins avail = {} - for bin in bins.iterkeys(): + for bin in six.iterkeys(bins): avail[bin] = [host['capacity'] - host['task_load'] for host in bin_hosts[bin]] avail[bin].sort() avail[bin].reverse() diff --git a/koji/policy.py b/koji/policy.py index 0842d84..5cb2047 100644 --- 
a/koji/policy.py +++ b/koji/policy.py @@ -17,10 +17,12 @@ # Authors: # Mike McLean +from __future__ import absolute_import import fnmatch import logging import koji +import six class BaseSimpleTest(object): @@ -292,7 +294,7 @@ class SimpleRuleSet(object): index[name] = 1 index = {} _recurse(self.ruleset, index) - return index.keys() + return list(index.keys()) def _apply(self, rules, data, top=False): for tests, negate, action in rules: @@ -361,7 +363,7 @@ def findSimpleTests(namespace): namespace = (namespace,) ret = {} for ns in namespace: - for key, value in ns.iteritems(): + for key, value in six.iteritems(ns): if value is BaseSimpleTest: # skip this abstract base class if we encounter it # this module contains generic tests, so it is valid to include it diff --git a/plugins/hub/echo.py b/plugins/hub/echo.py index 6727d41..38c96ba 100644 --- a/plugins/hub/echo.py +++ b/plugins/hub/echo.py @@ -8,7 +8,7 @@ from koji.plugin import callbacks, callback, ignore_error import logging -@callback(*callbacks.keys()) +@callback(*list(callbacks.keys())) @ignore_error def echo(cbtype, *args, **kws): logging.getLogger('koji.plugin.echo').info('Called the %s callback, args: %s; kws: %s', diff --git a/tests/test_lib/test_utils.py b/tests/test_lib/test_utils.py index fa94afe..18b5225 100644 --- a/tests/test_lib/test_utils.py +++ b/tests/test_lib/test_utils.py @@ -12,6 +12,7 @@ import calendar import six.moves.configparser import koji import koji.util +import six class EnumTestCase(unittest.TestCase): @@ -764,24 +765,24 @@ class MavenUtilTestCase(unittest.TestCase): name, release, date = 'fedora', 26, datetime.now().strftime('%Y%m%d') data = {'name': name, 'release': release, 'date': date} - six.assertCountEqual(self, data.items(), ldict.items()) - six.assertCountEqual(self, data.items(), [v for v in ldict.iteritems()]) + six.assertCountEqual(self, list(data.items()), list(ldict.items())) + six.assertCountEqual(self, list(data.items()), [v for v in six.iteritems(ldict)]) name, release, date = 'rhel', 7, '20171012' - six.assertCountEqual(self, [name, release, date], ldict.values()) - six.assertCountEqual(self, [name, release, date], [v for v in ldict.itervalues()]) + six.assertCountEqual(self, [name, release, date], list(ldict.values())) + six.assertCountEqual(self, [name, release, date], [v for v in six.itervalues(ldict)]) data = {'name': name, 'release': release, 'date': date} self.assertEqual(name, ldict.pop('name')) data.pop('name') - six.assertCountEqual(self, data.items(), ldict.items()) + six.assertCountEqual(self, list(data.items()), list(ldict.items())) (key, value) = ldict.popitem() data.pop(key) - six.assertCountEqual(self, data.items(), ldict.items()) + six.assertCountEqual(self, list(data.items()), list(ldict.items())) ldict_copy = ldict.copy() - six.assertCountEqual(self, data.items(), ldict_copy.items()) + six.assertCountEqual(self, list(data.items()), list(ldict_copy.items())) def test_LazyRecord(self): """Test LazyRecord object""" @@ -858,7 +859,7 @@ class MavenUtilTestCase(unittest.TestCase): actual = koji.util.eventFromOpts(session, opts) self.assertNotEqual(None, actual) - six.assertCountEqual(self, expect.items(), actual.items()) + six.assertCountEqual(self, list(expect.items()), list(actual.items())) # no event is matched case opts = mock.MagicMock(event=0, ts=0, repo=0) diff --git a/tests/test_plugins/test_runroot_builder.py b/tests/test_plugins/test_runroot_builder.py index 21ffc12..c300a60 100644 --- a/tests/test_plugins/test_runroot_builder.py +++ 
b/tests/test_plugins/test_runroot_builder.py @@ -69,7 +69,7 @@ class FakeConfigParser(object): return def sections(self): - return self.CONFIG.keys() + return list(self.CONFIG.keys()) def has_option(self, section, key): return section in self.CONFIG and key in self.CONFIG[section] diff --git a/vm/kojikamid.py b/vm/kojikamid.py index 36491fa..fe70dd6 100755 --- a/vm/kojikamid.py +++ b/vm/kojikamid.py @@ -26,6 +26,7 @@ # kojiwind --install # in a cygwin shell. +from __future__ import absolute_import from optparse import OptionParser from six.moves.configparser import ConfigParser import os @@ -42,6 +43,7 @@ import threading import re import glob import zipfile +import six MANAGER_PORT = 7000 @@ -639,7 +641,7 @@ def stream_logs(server, handler, builds): logpath = os.path.join(build.source_dir, relpath) if logpath not in logs: logs[logpath] = (relpath, None) - for log, (relpath, fd) in logs.iteritems(): + for log, (relpath, fd) in six.iteritems(logs): if not fd: if os.path.isfile(log): try: diff --git a/www/kojiweb/index.py b/www/kojiweb/index.py index aa5708d..c003d51 100644 --- a/www/kojiweb/index.py +++ b/www/kojiweb/index.py @@ -39,6 +39,7 @@ from kojiweb.util import _getValidTokens from koji.util import sha1_constructor from six.moves import zip from six.moves import range +import six # Convenience definition of a commonly-used sort function _sortbyname = kojiweb.util.sortByKeyFunc('name') @@ -684,7 +685,7 @@ def taskinfo(environ, taskID): values['pathinfo'] = pathinfo paths = [] # (volume, relpath) tuples - for relname, volumes in server.listTaskOutput(task['id'], all_volumes=True).iteritems(): + for relname, volumes in six.iteritems(server.listTaskOutput(task['id'], all_volumes=True)): paths += [(volume, relname) for volume in volumes] values['output'] = sorted(paths, cmp = _sortByExtAndName) if environ['koji.currentUser']: @@ -703,8 +704,8 @@ def taskstatus(environ, taskID): return '' files = server.listTaskOutput(taskID, stat=True, all_volumes=True) output = '%i:%s\n' % (task['id'], koji.TASK_STATES[task['state']]) - for filename, volumes_data in files.iteritems(): - for volume, file_stats in volumes_data.iteritems(): + for filename, volumes_data in six.iteritems(files): + for volume, file_stats in six.iteritems(volumes_data): output += '%s:%s:%s\n' % (volume, filename, file_stats['st_size']) return output @@ -2108,7 +2109,7 @@ def buildsbytarget(environ, days='7', start=None, order='-builds'): if builds > maxBuilds: maxBuilds = builds - kojiweb.util.paginateList(values, targets.values(), start, 'targets', 'target', order) + kojiweb.util.paginateList(values, list(targets.values()), start, 'targets', 'target', order) values['order'] = order diff --git a/www/kojiweb/wsgi_publisher.py b/www/kojiweb/wsgi_publisher.py index 98d75f6..a2c9a6d 100644 --- a/www/kojiweb/wsgi_publisher.py +++ b/www/kojiweb/wsgi_publisher.py @@ -399,7 +399,7 @@ class Dispatcher(object): if isinstance(result, basestring): headers.setdefault('content-length', ('Content-Length', str(len(result)))) headers.setdefault('content-type', ('Content-Type', 'text/html')) - headers = headers.values() + extra + headers = list(headers.values()) + extra self.logger.debug("Headers:") self.logger.debug(koji.util.LazyString(pprint.pformat, [headers])) start_response(status, headers)
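
The subject line records the tool invocation that produced every hunk above (`python-modernize -f libmodernize.fixes.fix_dict_six . -w`). For readers who have not used that fixer, the sketch below summarizes the two rewrite patterns visible throughout the diff. It is illustrative only: the `links` dict and the literal `fields` mapping are made-up stand-ins, not code taken from koji.

    from __future__ import print_function
    import six

    # Stand-in data; koji's real structures (e.g. the `data` dict in
    # _writeInheritanceData) are shaped similarly but richer.
    links = {1: {'priority': 10}, 2: {'priority': 20}}

    # Pattern 1: Python 2-only iterator methods become six helpers, which stay
    # lazy on Python 2 and map to the view-returning methods on Python 3.
    #   d.iteritems()  -> six.iteritems(d)
    #   d.itervalues() -> six.itervalues(d)
    #   d.iterkeys()   -> six.iterkeys(d)
    for parent_id, link in six.iteritems(links):
        print(parent_id, link['priority'])
    for link in six.itervalues(links):
        link['is_update'] = True

    # Pattern 2: bare .keys()/.values()/.items() calls are wrapped in list()
    # wherever the result is not immediately iterated, because Python 3
    # returns views rather than lists.
    ids = list(links.keys())                      # was: links.keys()
    fields = {'id': 'id', 'name': 'tag_name'}
    columns, aliases = list(zip(*list(fields.items())))

The fixer is deliberately conservative, which is why the diff contains double-wrapped expressions such as `list(zip(*list(fields.items())))`; these are correct on both Python versions and can be simplified by hand in a later cleanup.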