From 2460a00117f8f7c9a64c96415cf638d9385a824b Mon Sep 17 00:00:00 2001
From: Mike McLean
Date: Feb 12 2019 16:58:01 +0000
Subject: PR#921: Py3 hub


Merges #921 https://pagure.io/koji/pull-request/921
Fixes: #905 https://pagure.io/koji/issue/905
python3 hub

---

diff --git a/.coveragerc3 b/.coveragerc3
index b2b47e0..6fa6072 100644
--- a/.coveragerc3
+++ b/.coveragerc3
@@ -5,7 +5,6 @@
 omit =
     /usr/*
     tests/*
-    hub/*
     util/*
 
 [report]

diff --git a/Makefile b/Makefile
index f88379c..155ec9d 100644
--- a/Makefile
+++ b/Makefile
@@ -80,10 +80,10 @@ test3:
 	coverage3 erase
 	PYTHONPATH=hub/.:plugins/hub/.:plugins/builder/.:plugins/cli/.:cli/. coverage3 run \
 	    --rcfile .coveragerc3 --source . \
-	    /usr/bin/nosetests \
-	    tests/test_lib tests/test_cli
-	coverage3 report --rcfile .coveragerc3
-	coverage3 html --rcfile .coveragerc3
+	    /usr/bin/nosetests-3 \
+	    tests/test_lib tests/test_cli tests/test_hub
+	coverage report --rcfile .coveragerc3
+	coverage html --rcfile .coveragerc3
 	@echo Full coverage report at file://${PWD}/htmlcov/index.html
 
 test-tarball:

diff --git a/hub/kojihub.py b/hub/kojihub.py
index 86b6633..46b8f3e 100644
--- a/hub/kojihub.py
+++ b/hub/kojihub.py
@@ -42,13 +42,14 @@ import tarfile
 import tempfile
 import time
 import traceback
-import urlparse
 import six.moves.xmlrpc_client
 import zipfile
 
 import rpm
 import six
 
+from six.moves.urllib.parse import parse_qs
+
 import koji
 import koji.auth
 import koji.db
@@ -2390,9 +2391,8 @@ def repo_init(tag, with_src=False, with_debuginfo=False, event=None):
         groupsdir = "%s/groups" % (repodir)
         koji.ensuredir(groupsdir)
         comps = koji.generate_comps(groups, expand_groups=True)
-        fo = open("%s/comps.xml" % groupsdir, 'w')
-        fo.write(comps)
-        fo.close()
+        with open("%s/comps.xml" % groupsdir, 'w') as fo:
+            fo.write(comps)
 
     #get build dirs
     relpathinfo = koji.PathInfo(topdir='toplink')
@@ -2505,9 +2505,8 @@ def _write_maven_repo_metadata(destdir, artifacts):
   </versioning>
 </metadata>
 """ % datetime.datetime.now().strftime('%Y%m%d%H%M%S')
-    mdfile = open(os.path.join(destdir, 'maven-metadata.xml'), 'w')
-    mdfile.write(contents)
-    mdfile.close()
+    with open(os.path.join(destdir, 'maven-metadata.xml'), 'w') as mdfile:
+        mdfile.write(contents)
     _generate_maven_metadata(destdir)
 
 def dist_repo_init(tag, keys, task_opts):
@@ -4352,14 +4351,16 @@ def _get_zipfile_list(archive_id, zippath):
     result = []
     if not os.path.exists(zippath):
         return result
-    archive = zipfile.ZipFile(zippath, 'r')
-    for entry in archive.infolist():
-        filename = koji.fixEncoding(entry.filename)
-        result.append({'archive_id': archive_id,
-                       'name': filename,
-                       'size': entry.file_size,
-                       'mtime': int(time.mktime(entry.date_time + (0, 0, -1)))})
-    archive.close()
+    with zipfile.ZipFile(zippath, 'r') as archive:
+        for entry in archive.infolist():
+            if six.PY2:
+                filename = koji.fixEncoding(entry.filename)
+            else:
+                filename = entry.filename
+            result.append({'archive_id': archive_id,
+                           'name': filename,
+                           'size': entry.file_size,
+                           'mtime': int(time.mktime(entry.date_time + (0, 0, -1)))})
     return result
 
 def _get_tarball_list(archive_id, tarpath):
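[The PY2/PY3 branch above repeats in both archive listers: on Python 2, zipfile and tarfile hand back byte strings that koji.fixEncoding has to normalize, while on Python 3 the stdlib already yields str. A minimal sketch of the shared pattern, with fixEncoding approximated by a plain utf-8 decode and the helper name hypothetical:

    import six

    def entry_name(raw_name):
        """Normalize an archive member name across interpreters."""
        if six.PY2:
            # py2 zipfile/tarfile members are byte strings; decode to unicode
            return raw_name.decode('utf-8', 'replace')
        # py3 members are already str
        return raw_name
]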
@@ -4378,17 +4379,19 @@ def _get_tarball_list(archive_id, tarpath):
     result = []
     if not os.path.exists(tarpath):
         return result
-    archive = tarfile.open(tarpath, 'r')
-    for entry in archive:
-        filename = koji.fixEncoding(entry.name)
-        result.append({'archive_id': archive_id,
-                       'name': filename,
-                       'size': entry.size,
-                       'mtime': entry.mtime,
-                       'mode': entry.mode,
-                       'user': entry.uname,
-                       'group': entry.gname})
-    archive.close()
+    with tarfile.open(tarpath, 'r') as archive:
+        for entry in archive:
+            if six.PY2:
+                filename = koji.fixEncoding(entry.name)
+            else:
+                filename = entry.name
+            result.append({'archive_id': archive_id,
+                           'name': filename,
+                           'size': entry.size,
+                           'mtime': entry.mtime,
+                           'mode': entry.mode,
+                           'user': entry.uname,
+                           'group': entry.gname})
     return result
 
 def list_archive_files(archive_id, queryOpts=None, strict=False):
@@ -5515,9 +5518,8 @@ class CG_Importer(object):
         path = os.path.join(workdir, directory, metadata)
         if not os.path.exists(path):
             raise koji.GenericError("No such file: %s" % metadata)
-        fo = open(path, 'rb')
-        metadata = fo.read()
-        fo.close()
+        with open(path, 'rt') as fo:
+            metadata = fo.read()
         self.raw_metadata = metadata
         self.metadata = parse_json(metadata, desc='metadata')
         return self.metadata
@@ -5657,11 +5659,8 @@ class CG_Importer(object):
         builddir = koji.pathinfo.build(self.buildinfo)
         koji.ensuredir(builddir)
         path = os.path.join(builddir, 'metadata.json')
-        fo = open(path, 'w')
-        try:
+        with open(path, 'w') as fo:
             fo.write(self.raw_metadata)
-        finally:
-            fo.close()
 
 
     def prep_brs(self):
@@ -6156,16 +6155,16 @@ def merge_scratch(task_id):
         raise koji.ImportError('SCM URLs for the task and build do not match: %s, %s' % \
               (task_info['request'][0], build_task_info['request'][0]))
     build_arches = set()
-    for rpm in list_rpms(buildID=build['id']):
-        if rpm['arch'] == 'src':
-            build_srpm = '%s.src.rpm' % rpm['nvr']
+    for rpminfo in list_rpms(buildID=build['id']):
+        if rpminfo['arch'] == 'src':
+            build_srpm = '%s.src.rpm' % rpminfo['nvr']
             if srpm != build_srpm:
                 raise koji.ImportError('task and build srpm names do not match: %s, %s' % \
                       (srpm, build_srpm))
-        elif rpm['arch'] == 'noarch':
+        elif rpminfo['arch'] == 'noarch':
             continue
         else:
-            build_arches.add(rpm['arch'])
+            build_arches.add(rpminfo['arch'])
     if not build_arches:
         raise koji.ImportError('no arch-specific rpms found for %s' % build['nvr'])
     task_arches = set([t['arch'] for t in tasks.values()])
@@ -6359,7 +6358,10 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No
     archiveinfo = {'buildroot_id': buildroot_id}
     archiveinfo['build_id'] = buildinfo['id']
     if metadata_only:
-        filename = koji.fixEncoding(fileinfo['filename'])
+        if six.PY2:
+            filename = koji.fixEncoding(fileinfo['filename'])
+        else:
+            filename = fileinfo['filename']
         archiveinfo['filename'] = filename
         archiveinfo['size'] = fileinfo['filesize']
         archiveinfo['checksum'] = fileinfo['checksum']
@@ -6370,19 +6372,21 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No
         archiveinfo['checksum_type'] = koji.CHECKSUM_TYPES[fileinfo['checksum_type']]
         archiveinfo['metadata_only'] = True
     else:
-        filename = koji.fixEncoding(os.path.basename(filepath))
+        if six.PY2:
+            filename = koji.fixEncoding(os.path.basename(filepath))
+        else:
+            filename = os.path.basename(filepath)
         archiveinfo['filename'] = filename
         archiveinfo['size'] = os.path.getsize(filepath)
         # trust values computed on hub (CG_Importer.prep_outputs)
         if not fileinfo or not fileinfo.get('hub.checked_md5'):
-            archivefp = open(filepath)
-            m = md5_constructor()
-            while True:
-                contents = archivefp.read(8192)
-                if not contents:
-                    break
-                m.update(contents)
-            archivefp.close()
+            with open(filepath, 'rb') as archivefp:
+                m = md5_constructor()
+                while True:
+                    contents = archivefp.read(8192)
+                    if not contents:
+                        break
+                    m.update(contents)
             archiveinfo['checksum'] = m.hexdigest()
         else:
             archiveinfo['checksum'] = fileinfo['checksum']
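[The checksum loop above now opens the archive in binary mode, which is what makes it portable: on Python 3, hashlib's update() rejects str and requires bytes. A standalone sketch of the same chunked-digest idiom:

    import hashlib

    def file_md5(path, chunksize=8192):
        """Chunked md5 digest; binary mode is mandatory on py3."""
        m = hashlib.md5()
        with open(path, 'rb') as fp:
            while True:
                chunk = fp.read(chunksize)
                if not chunk:
                    break
                m.update(chunk)
        return m.hexdigest()
]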
@@ -6492,8 +6496,10 @@ def _import_archive_file(filepath, destdir):
     A symlink pointing from the old location to
     the new location will be created.
     """
-    final_path = "%s/%s" % (destdir,
-                            koji.fixEncoding(os.path.basename(filepath)))
+    fname = os.path.basename(filepath)
+    if six.PY2:
+        fname = koji.fixEncoding(fname)
+    final_path = "%s/%s" % (destdir, fname)
     if os.path.exists(final_path):
         raise koji.GenericError("Error importing archive file, %s already exists" % final_path)
     if os.path.islink(filepath) or not os.path.isfile(filepath):
@@ -6515,16 +6521,14 @@ def _generate_maven_metadata(mavendir):
             sumfile = mavenfile + ext
             if sumfile not in mavenfiles:
                 sum = sum_constr()
-                fobj = open('%s/%s' % (mavendir, mavenfile))
-                while True:
-                    content = fobj.read(8192)
-                    if not content:
-                        break
-                    sum.update(content)
-                fobj.close()
-                sumobj = open('%s/%s' % (mavendir, sumfile), 'w')
-                sumobj.write(sum.hexdigest())
-                sumobj.close()
+                with open('%s/%s' % (mavendir, mavenfile), 'rb') as fobj:
+                    while True:
+                        content = fobj.read(8192)
+                        if not content:
+                            break
+                        sum.update(content)
+                with open('%s/%s' % (mavendir, sumfile), 'w') as sumobj:
+                    sumobj.write(sum.hexdigest())
 
 def add_rpm_sig(an_rpm, sighdr):
     """Store a signature header for an rpm"""
@@ -6578,9 +6582,8 @@ def add_rpm_sig(an_rpm, sighdr):
     # - write to fs
     sigpath = "%s/%s" % (builddir, koji.pathinfo.sighdr(rinfo, sigkey))
     koji.ensuredir(os.path.dirname(sigpath))
-    fo = open(sigpath, 'wb')
-    fo.write(sighdr)
-    fo.close()
+    with open(sigpath, 'wb') as fo:
+        fo.write(sighdr)
     koji.plugin.run_callbacks('postRPMSign', sigkey=sigkey, sighash=sighash, build=binfo, rpm=rinfo)
 
 def _scan_sighdr(sighdr, fn):
@@ -6631,9 +6634,8 @@ def check_rpm_sig(an_rpm, sigkey, sighdr):
         koji.splice_rpm_sighdr(sighdr, rpm_path, temp)
         ts = rpm.TransactionSet()
         ts.setVSFlags(0)  #full verify
-        fo = open(temp, 'rb')
-        hdr = ts.hdrFromFdno(fo.fileno())
-        fo.close()
+        with open(temp, 'rb') as fo:
+            hdr = ts.hdrFromFdno(fo.fileno())
     except:
         try:
             os.unlink(temp)
@@ -6694,9 +6696,8 @@ def write_signed_rpm(an_rpm, sigkey, force=False):
         else:
             os.unlink(signedpath)
     sigpath = "%s/%s" % (builddir, koji.pathinfo.sighdr(rinfo, sigkey))
-    fo = open(sigpath, 'rb')
-    sighdr = fo.read()
-    fo.close()
+    with open(sigpath, 'rb') as fo:
+        sighdr = fo.read()
     koji.ensuredir(os.path.dirname(signedpath))
     koji.splice_rpm_sighdr(sighdr, rpm_path, signedpath)
@@ -7682,7 +7683,10 @@ def parse_json(value, desc=None, errstr=None):
     if value is None:
         return value
     try:
-        return koji.fixEncodingRecurse(json.loads(value))
+        if six.PY2:
+            return koji.fixEncodingRecurse(json.loads(value))
+        else:
+            return json.loads(value)
     except Exception:
         if errstr is None:
             if desc is None:
@@ -7720,8 +7724,7 @@ class InsertProcessor(object):
         if not self.data and not self.rawdata:
             return "-- incomplete update: no assigns"
         parts = ['INSERT INTO %s ' % self.table]
-        columns = to_list(self.data.keys())
-        columns.extend(to_list(self.rawdata.keys()))
+        columns = sorted(to_list(self.data.keys()) + to_list(self.rawdata.keys()))
         parts.append("(%s) " % ', '.join(columns))
         values = []
         for key in columns:
@@ -7807,10 +7810,10 @@ class UpdateProcessor(object):
         parts = ['UPDATE %s SET ' % self.table]
         assigns = ["%s = %%(data.%s)s" % (key, key) for key in self.data]
         assigns.extend(["%s = (%s)" % (key, self.rawdata[key]) for key in self.rawdata])
-        parts.append(', '.join(assigns))
+        parts.append(', '.join(sorted(assigns)))
         if self.clauses:
             parts.append('\nWHERE ')
-            parts.append(' AND '.join(["( %s )" % c for c in self.clauses]))
+            parts.append(' AND '.join(["( %s )" % c for c in sorted(self.clauses)]))
         return ''.join(parts)
 
     def __repr__(self):
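[The sorted() calls in InsertProcessor and UpdateProcessor are the heart of this change: Python 3 randomizes dict hashing, so iterating self.data.keys() no longer yields a stable column order, and tests that compare literal SQL strings (see the db.json fixture below) would fail intermittently. A sketch of the deterministic statement building, under the same convention:

    def build_insert(table, data):
        """Deterministic INSERT text, in the style of InsertProcessor."""
        columns = sorted(data)
        placeholders = ', '.join(['%%(%s)s' % c for c in columns])
        return 'INSERT INTO %s (%s) VALUES (%s)' % (
            table, ', '.join(columns), placeholders)

    # build_insert('image_archives', {'archive_id': 1001, 'arch': 'x86_64'})
    # always yields:
    # INSERT INTO image_archives (arch, archive_id) VALUES (%(arch)s, %(archive_id)s)
]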
@@ -7883,12 +7886,23 @@ class QueryProcessor(object):
         if columns and aliases:
             if len(columns) != len(aliases):
                 raise Exception('column and alias lists must be the same length')
-            self.colsByAlias = dict(zip(aliases, columns))
+            # reorder
+            alias_table = sorted(zip(aliases, columns))
+            self.aliases = [x[0] for x in alias_table]
+            self.columns = [x[1] for x in alias_table]
+            self.colsByAlias = dict(alias_table)
         else:
             self.colsByAlias = {}
+            if columns:
+                self.columns = sorted(columns)
+            if aliases:
+                self.aliases = sorted(aliases)
         self.tables = tables
         self.joins = joins
-        self.clauses = clauses
+        if clauses:
+            self.clauses = sorted(clauses)
+        else:
+            self.clauses = clauses
         self.cursors = 0
         if values:
             self.values = values
@@ -7926,7 +7940,7 @@ SELECT %(col_str)s
             col_str = 'count(*)'
         else:
             col_str = self._seqtostr(self.columns)
-        table_str = self._seqtostr(self.tables)
+        table_str = self._seqtostr(self.tables, sort=True)
         join_str = self._joinstr()
         clause_str = self._seqtostr(self.clauses, sep=')\n   AND (')
         if clause_str:
@@ -7947,8 +7961,10 @@ SELECT %(col_str)s
         return '<QueryProcessor: columns=%r, aliases=%r, tables=%r, joins=%r, clauses=%r, values=%r, opts=%r>' % \
             (self.columns, self.aliases, self.tables, self.joins, self.clauses, self.values, self.opts)
 
-    def _seqtostr(self, seq, sep=', '):
+    def _seqtostr(self, seq, sep=', ', sort=False):
         if seq:
+            if sort:
+                seq = sorted(seq)
             return sep.join(seq)
         else:
             return ''
@@ -9194,15 +9210,14 @@ class RootExports(object):
         if not os.path.isfile(filePath):
             raise koji.GenericError('no file "%s" output by task %i' % (fileName, taskID))
         # Let the caller handler any IO or permission errors
-        f = open(filePath, 'r')
-        if isinstance(offset, str):
-            offset = int(offset)
-        if offset != None and offset > 0:
-            f.seek(offset, 0)
-        elif offset != None and offset < 0:
-            f.seek(offset, 2)
-        contents = f.read(size)
-        f.close()
+        with open(filePath, 'r') as f:
+            if isinstance(offset, str):
+                offset = int(offset)
+            if offset != None and offset > 0:
+                f.seek(offset, 0)
+            elif offset != None and offset < 0:
+                f.seek(offset, 2)
+            contents = f.read(size)
         return base64.encodestring(contents)
 
     listTaskOutput = staticmethod(list_task_output)
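[The offset contract in the hunk above is worth spelling out: a positive offset seeks from the start of the file, a negative one from the end (whence=2, i.e. os.SEEK_END). One wrinkle the port leaves open: base64.encodestring expects bytes on Python 3, so the text-mode open may need a follow-up change. A sketch of the seek logic, using a hypothetical read_slice helper:

    import os

    def read_slice(path, offset=None, size=-1):
        """Read `size` bytes starting at `offset` (negative = from the end)."""
        with open(path, 'rb') as f:
            if isinstance(offset, str):
                offset = int(offset)
            if offset is not None and offset > 0:
                f.seek(offset, os.SEEK_SET)   # from the start
            elif offset is not None and offset < 0:
                f.seek(offset, os.SEEK_END)   # from the end
            return f.read(size)
]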
""" - if noneGreatest: - # Normally None evaluates to be less than every other value - # Invert the comparison so it always evaluates to greater - cmpFunc = lambda a, b: (a is None or b is None) and -(cmp(a, b)) or cmp(a, b) - else: - cmpFunc = cmp - - if key.startswith('-'): - key = key[1:] - return lambda a, b: cmpFunc(b[key], a[key]) - else: - return lambda a, b: cmpFunc(a[key], b[key]) + def internal_key(obj): + v = obj[key] + # Nones has priority, others are second + return (v is None, v) + return internal_key def filterResults(self, methodName, *args, **kw): """Execute the XML-RPC method with the given name and filter the results @@ -11100,7 +11111,15 @@ class RootExports(object): order = filterOpts.get('order') if order: - results.sort(self._sortByKeyFunc(order, filterOpts.get('noneGreatest', True))) + if order.startswith('-'): + reverse = True + order = order[1:] + else: + reverse = False + if filterOpts.get('noneGreatest', True): + results.sort(self._sortByKeyFuncNoneGreatest(order), reverse=reverse) + else: + results.sort(key=order, reverse=reverse) offset = filterOpts.get('offset') if offset is not None: @@ -12950,9 +12969,8 @@ def get_upload_path(reldir, name, create=False, volume=None): if context.session.user_id != user_id: raise koji.GenericError("Invalid upload directory, not owner: %s" % orig_reldir) else: - fo = open(u_fn, 'w') - fo.write(str(context.session.user_id)) - fo.close() + with open(u_fn, 'w') as fo: + fo.write(str(context.session.user_id)) return os.path.join(udir, name) def get_verify_class(verify): @@ -12972,7 +12990,7 @@ def handle_upload(environ): start = time.time() if not context.session.logged_in: raise koji.ActionNotAllowed('you must be logged-in to upload a file') - args = urlparse.parse_qs(environ.get('QUERY_STRING', ''), strict_parsing=True) + args = parse_qs(environ.get('QUERY_STRING', ''), strict_parsing=True) #XXX - already parsed by auth name = args['filename'][0] path = args.get('filepath', ('',))[0] diff --git a/hub/kojixmlrpc.py b/hub/kojixmlrpc.py index 8f15cdb..9b8ef93 100644 --- a/hub/kojixmlrpc.py +++ b/hub/kojixmlrpc.py @@ -51,7 +51,10 @@ class Marshaller(ExtendedMarshaller): def dump_datetime(self, value, write): # For backwards compatibility, we return datetime objects as strings value = value.isoformat(' ') - self.dump_string(value, write) + if six.PY2: + self.dump_string(value, write) + else: + self.dump_unicode(value, write) dispatch[datetime.datetime] = dump_datetime @@ -363,6 +366,8 @@ def offline_reply(start_response, msg=None): else: faultString = msg response = dumps(Fault(faultCode, faultString)) + if six.PY3: + response = response.encode() headers = [ ('Content-Length', str(len(response))), ('Content-Type', "text/xml"), @@ -628,7 +633,7 @@ def setup_logging2(opts): name = 'koji' + name elif not name.startswith('koji'): name = 'koji.' + name - level_code = logging._levelNames[level] + level_code = logging.getLevelName(level) logging.getLogger(name).setLevel(level_code) logger = logging.getLogger("koji") # if KojiDebug is set, force main log level to DEBUG @@ -699,6 +704,8 @@ def application(environ, start_response): ] start_response('405 Method Not Allowed', headers) response = "Method Not Allowed\nThis is an XML-RPC server. Only POST requests are accepted." 
@@ -12950,9 +12969,8 @@ def get_upload_path(reldir, name, create=False, volume=None):
             if context.session.user_id != user_id:
                 raise koji.GenericError("Invalid upload directory, not owner: %s" % orig_reldir)
         else:
-            fo = open(u_fn, 'w')
-            fo.write(str(context.session.user_id))
-            fo.close()
+            with open(u_fn, 'w') as fo:
+                fo.write(str(context.session.user_id))
     return os.path.join(udir, name)
 
 def get_verify_class(verify):
@@ -12972,7 +12990,7 @@ def handle_upload(environ):
     start = time.time()
     if not context.session.logged_in:
        raise koji.ActionNotAllowed('you must be logged-in to upload a file')
-    args = urlparse.parse_qs(environ.get('QUERY_STRING', ''), strict_parsing=True)
+    args = parse_qs(environ.get('QUERY_STRING', ''), strict_parsing=True)
     #XXX - already parsed by auth
     name = args['filename'][0]
     path = args.get('filepath', ('',))[0]

diff --git a/hub/kojixmlrpc.py b/hub/kojixmlrpc.py
index 8f15cdb..9b8ef93 100644
--- a/hub/kojixmlrpc.py
+++ b/hub/kojixmlrpc.py
@@ -51,7 +51,10 @@ class Marshaller(ExtendedMarshaller):
     def dump_datetime(self, value, write):
         # For backwards compatibility, we return datetime objects as strings
         value = value.isoformat(' ')
-        self.dump_string(value, write)
+        if six.PY2:
+            self.dump_string(value, write)
+        else:
+            self.dump_unicode(value, write)
     dispatch[datetime.datetime] = dump_datetime
 
@@ -363,6 +366,8 @@ def offline_reply(start_response, msg=None):
     else:
         faultString = msg
     response = dumps(Fault(faultCode, faultString))
+    if six.PY3:
+        response = response.encode()
     headers = [
         ('Content-Length', str(len(response))),
         ('Content-Type', "text/xml"),
@@ -628,7 +633,7 @@ def setup_logging2(opts):
             name = 'koji' + name
         elif not name.startswith('koji'):
             name = 'koji.' + name
-        level_code = logging._levelNames[level]
+        level_code = logging.getLevelName(level)
         logging.getLogger(name).setLevel(level_code)
     logger = logging.getLogger("koji")
     # if KojiDebug is set, force main log level to DEBUG
@@ -699,6 +704,8 @@ def application(environ, start_response):
         ]
         start_response('405 Method Not Allowed', headers)
         response = "Method Not Allowed\nThis is an XML-RPC server. Only POST requests are accepted."
+        if six.PY3:
+            response = response.encode()
         headers = [
             ('Content-Length', str(len(response))),
             ('Content-Type', "text/plain"),
@@ -728,6 +735,8 @@ def application(environ, start_response):
             response = h._wrap_handler(h.handle_upload, environ)
         else:
             response = h._wrap_handler(h.handle_rpc, environ)
+        if six.PY3:
+            response = response.encode()
         headers = [
             ('Content-Length', str(len(response))),
             ('Content-Type', "text/xml"),
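[All three encode() calls in kojixmlrpc.py serve the same WSGI rule: PEP 3333 requires the response body to be bytes, and Content-Length must be computed from the encoded body rather than the str. A minimal sketch of the pattern:

    def application(environ, start_response):
        body = "This is an XML-RPC server. Only POST requests are accepted."
        if not isinstance(body, bytes):   # the hub guards this with six.PY3
            body = body.encode()
        start_response('405 Method Not Allowed', [
            ('Content-Length', str(len(body))),   # length of the bytes
            ('Content-Type', 'text/plain'),
        ])
        return [body]
]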
diff --git a/koji.spec b/koji.spec
index 907b2d8..b274853 100644
--- a/koji.spec
+++ b/koji.spec
@@ -123,6 +123,20 @@ Plugins to the koji command-line interface
 %package hub
 Summary: Koji XMLRPC interface
 Group: Applications/Internet
+License: LGPLv2
+Requires: %{name} = %{version}-%{release}
+Requires: %{name}-hub-code
+%if 0%{?fedora} || 0%{?rhel} > 7
+Suggests: python%{python3_pkgversion}-%{name}-hub
+Suggests: python%{python3_pkgversion}-%{name}-hub-plugins
+%endif
+
+%description hub
+koji-hub is the XMLRPC interface to the koji database
+
+%package -n python2-%{name}-hub
+Summary: Koji XMLRPC interface
+Group: Applications/Internet
 License: LGPLv2 and GPLv2
 # rpmdiff lib (from rpmlint) is GPLv2 (only)
 Requires: httpd
@@ -134,25 +148,78 @@ Requires: python-psycopg2
 Requires: %{name} = %{version}-%{release}
 # we need the python2 lib here
 Requires: python2-%{name} = %{version}-%{release}
+# py2 xor py3
+Provides: %{name}-hub-code = %{version}-%{release}
 
-%description hub
+%description -n python2-%{name}-hub
+koji-hub is the XMLRPC interface to the koji database
+
+%if 0%{with python3}
+%package -n python%{python3_pkgversion}-%{name}-hub
+Summary: Koji XMLRPC interface
+Group: Applications/Internet
+License: LGPLv2 and GPLv2
+# rpmdiff lib (from rpmlint) is GPLv2 (only)
+Requires: httpd
+Requires: mod_wsgi
+%if 0%{?fedora} >= 21 || 0%{?rhel} >= 7
+Requires: mod_auth_gssapi
+%endif
+Requires: python-psycopg2
+Requires: %{name} = %{version}-%{release}
+# we need the python3 lib here
+Requires: python%{python3_pkgversion}-%{name} = %{version}-%{release}
+# py2 xor py3
+Provides: %{name}-hub-code = %{version}-%{release}
+
+%description -n python%{python3_pkgversion}-%{name}-hub
 koji-hub is the XMLRPC interface to the koji database
+%endif
 
 %package hub-plugins
 Summary: Koji hub plugins
 Group: Applications/Internet
 License: LGPLv2
-Requires: %{name} = %{version}-%{release}
-Requires: %{name}-hub = %{version}-%{release}
+Requires: %{name}-hub-plugins-code
+%if 0%{?fedora} || 0%{?rhel} > 7
+Suggests: python%{python3_pkgversion}-%{name}-hub-plugins
+%endif
+
+%description hub-plugins
+Plugins to the koji XMLRPC interface
+
+%package -n python2-%{name}-hub-plugins
+Summary: Koji hub plugins
+Group: Applications/Internet
+License: LGPLv2
+Requires: python2-%{name}-hub = %{version}-%{release}
 Requires: python-qpid >= 0.7
-%if 0%{?rhel} >= 6
+%if 0%{?fedora} >= 27 || 0%{?rhel} >= 6
 Requires: python-qpid-proton
 %endif
 Requires: cpio
+Provides: %{name}-hub-plugins-code
 
-%description hub-plugins
+%description -n python2-%{name}-hub-plugins
 Plugins to the koji XMLRPC interface
 
+%if 0%{with python3}
+%package -n python%{python3_pkgversion}-%{name}-hub-plugins
+Summary: Koji hub plugins
+Group: Applications/Internet
+License: LGPLv2
+Requires: python%{python3_pkgversion}-%{name}-hub = %{version}-%{release}
+Requires: python-qpid >= 0.7
+%if 0%{?fedora} >= 27 || 0%{?rhel} >= 6
+Requires: python%{python3_pkgversion}-qpid-proton
+%endif
+Requires: cpio
+Provides: %{name}-hub-plugins-code
+
+%description -n python%{python3_pkgversion}-%{name}-hub-plugins
+Plugins to the koji XMLRPC interface
+%endif
+
 %package builder-plugins
 Summary: Koji builder plugins
 Group: Applications/Internet
@@ -270,12 +337,11 @@ koji-web is a web UI to the Koji system.
 rm -rf $RPM_BUILD_ROOT
 make DESTDIR=$RPM_BUILD_ROOT PYTHON=%{__python2} %{?install_opt} install
 %if 0%{with python3}
-cd koji
-make DESTDIR=$RPM_BUILD_ROOT PYTHON=%{__python3} %{?install_opt} install
-cd ../cli
-make DESTDIR=$RPM_BUILD_ROOT PYTHON=%{__python3} %{?install_opt} install
-cd ../plugins
-make DESTDIR=$RPM_BUILD_ROOT PYTHON=%{__python3} %{?install_opt} install
+for d in koji cli plugins hub ; do
+    pushd $d
+    make DESTDIR=$RPM_BUILD_ROOT PYTHON=%{__python3} %{?install_opt} install
+    popd
+done
 # alter python interpreter in koji CLI
 sed -i 's/\#\!\/usr\/bin\/python2/\#\!\/usr\/bin\/python3/' $RPM_BUILD_ROOT/usr/bin/koji
 %endif
@@ -319,20 +385,39 @@ rm -rf $RPM_BUILD_ROOT
 
 %files hub
 %defattr(-,root,root)
-%{_datadir}/koji-hub
-%dir %{_libexecdir}/koji-hub
 %config(noreplace) /etc/httpd/conf.d/kojihub.conf
 %dir /etc/koji-hub
 %config(noreplace) /etc/koji-hub/hub.conf
 %dir /etc/koji-hub/hub.conf.d
 
-%files hub-plugins
+%files -n python2-%{name}-hub
 %defattr(-,root,root)
-%dir %{_prefix}/lib/koji-hub-plugins
-%{_prefix}/lib/koji-hub-plugins/*.py*
+%{_datadir}/koji-hub/*.py*
+%dir %{_libexecdir}/koji-hub
+
+%if 0%{with python3}
+%files -n python%{python3_pkgversion}-%{name}-hub
+%defattr(-,root,root)
+%{_datadir}/koji-hub/*.py
+%{_datadir}/koji-hub/__pycache__
+%dir %{_libexecdir}/koji-hub
+%endif
+
+%files hub-plugins
 %dir /etc/koji-hub/plugins
 %config(noreplace) /etc/koji-hub/plugins/*.conf
 
+%files -n python2-%{name}-hub-plugins
+%defattr(-,root,root)
+%{_prefix}/lib/koji-hub-plugins/*.py*
+
+%if 0%{with python3}
+%files -n python%{python3_pkgversion}-%{name}-hub-plugins
+%defattr(-,root,root)
+%{_prefix}/lib/koji-hub-plugins/*.py
+%{_prefix}/lib/koji-hub-plugins/__pycache__
+%endif
+
 %files builder-plugins
 %defattr(-,root,root)
 %dir /etc/kojid/plugins

diff --git a/koji/__init__.py b/koji/__init__.py
index 05c8e5f..bd9ec69 100644
--- a/koji/__init__.py
+++ b/koji/__init__.py
@@ -890,6 +890,22 @@ def get_rpm_header(f, ts=None):
     return hdr
 
 
+def _decode_item(item):
+    """Decode rpm header byte strings to str in py3"""
+    if six.PY2:
+        return item
+    elif isinstance(item, bytes):
+        try:
+            return item.decode()
+        except UnicodeDecodeError:
+            # typically signatures
+            return item
+    elif isinstance(item, list):
+        return [_decode_item(x) for x in item]
+    else:
+        return item
+
+
 def get_header_field(hdr, name, src_arch=False):
     """Extract named field from an rpm header"""
     name = name.upper()
@@ -913,12 +929,6 @@ def get_header_field(hdr, name, src_arch=False):
         result = []
     elif isinstance(result, six.integer_types):
         result = [result]
-    if six.PY3 and isinstance(result, bytes):
-        try:
-            result = result.decode('utf-8')
-        except UnicodeDecodeError:
-            # typically signatures
-            pass
 
     sizetags = ('SIZE', 'ARCHIVESIZE', 'FILESIZES', 'SIGSIZE')
     if name in sizetags and (result is None or result == []):
@@ -928,7 +938,7 @@ def get_header_field(hdr, name, src_arch=False):
             # no such header
             pass
 
-    return result
+    return _decode_item(result)
 
 
 def _get_header_field(hdr, name):
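[The new _decode_item helper replaces the earlier top-level-only decode in get_header_field: it recurses into lists, so multi-valued header tags (file lists, changelog names) are decoded element by element, while blobs that are not valid UTF-8 (typically signatures) pass through as bytes. Illustrative calls on Python 3, with made-up values:

    _decode_item(b'python-koji')           # -> 'python-koji'
    _decode_item([b'/usr/bin', b'/etc'])   # -> ['/usr/bin', '/etc']
    _decode_item(b'\x89\x02\xff')          # not UTF-8 (a signature) -> left as bytes
    _decode_item(1546300800)               # non-bytes values pass through unchanged
]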
diff --git a/koji/db.py b/koji/db.py
index c4e9ab3..3f238a8 100644
--- a/koji/db.py
+++ b/koji/db.py
@@ -23,7 +23,6 @@
 from __future__ import absolute_import
 
 import logging
-import sys
 import psycopg2
 # import psycopg2.extensions
 # # don't convert timestamp fields to DateTime objects
@@ -32,10 +31,11 @@ import psycopg2
 # del psycopg2.extensions.string_types[1082]
 # del psycopg2.extensions.string_types[1083]
 # del psycopg2.extensions.string_types[1266]
+import re
+import sys
 import time
 import traceback
 from . import context
-import re
 
 POSITIONAL_RE = re.compile(r'%[a-z]')
 NAMED_RE = re.compile(r'%\(([^\)]+)\)[a-z]')

diff --git a/plugins/Makefile b/plugins/Makefile
index f775bda..0d97881 100644
--- a/plugins/Makefile
+++ b/plugins/Makefile
@@ -29,16 +29,16 @@ install:
 		echo "ERROR: A destdir is required"; \
 		exit 1; \
 	fi
+	mkdir -p $(DESTDIR)/$(HUBPLUGINDIR); \
+	install -p -m 644 $(HUBFILES) $(DESTDIR)/$(HUBPLUGINDIR); \
+	$(PYTHON) -c "import compileall; compileall.compile_dir('$(DESTDIR)/$(HUBPLUGINDIR)', 1, '$(HUBPLUGINDIR)', 1)"; \
+	mkdir -p $(DESTDIR)/$(HUBCONFDIR); \
+	install -p -m 644 $(HUBCONFFILES) $(DESTDIR)/$(HUBCONFDIR); \
 	if [ "$(PYMAJORVER)" == "2" ] ; then \
-		mkdir -p $(DESTDIR)/$(HUBPLUGINDIR); \
 		mkdir -p $(DESTDIR)/$(BUILDERPLUGINDIR); \
-		install -p -m 644 $(HUBFILES) $(DESTDIR)/$(HUBPLUGINDIR); \
 		install -p -m 644 $(BUILDERFILES) $(DESTDIR)/$(BUILDERPLUGINDIR); \
-		$(PYTHON) -c "import compileall; compileall.compile_dir('$(DESTDIR)/$(HUBPLUGINDIR)', 1, '$(HUBPLUGINDIR)', 1)"; \
 		$(PYTHON) -c "import compileall; compileall.compile_dir('$(DESTDIR)/$(BUILDERPLUGINDIR)', 1, '$(BUILDERPLUGINDIR)', 1)"; \
-		mkdir -p $(DESTDIR)/$(HUBCONFDIR); \
 		mkdir -p $(DESTDIR)/$(BUILDERCONFDIR); \
-		install -p -m 644 $(HUBCONFFILES) $(DESTDIR)/$(HUBCONFDIR); \
 		install -p -m 644 $(BUILDERCONFFILES) $(DESTDIR)/$(BUILDERCONFDIR); \
 	fi
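[With this Makefile change the hub plugins are installed and byte-compiled for whichever interpreter drives the build, while builder plugins stay py2-only. The $(PYTHON) -c one-liner is compileall with positional arguments; spelled out with keywords (paths hypothetical):

    import compileall

    compileall.compile_dir(
        '/tmp/buildroot/usr/lib/koji-hub-plugins',  # $(DESTDIR)/$(HUBPLUGINDIR)
        maxlevels=1,                                # do not recurse deeply
        ddir='/usr/lib/koji-hub-plugins',           # path recorded in tracebacks
        force=True,                                 # recompile even if up to date
    )

On Python 3 this produces __pycache__ directories, which is why the py3 file lists in koji.spec ship __pycache__ instead of *.py*.]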
diff --git a/tests/test_hub/data/image/import_1/db.json b/tests/test_hub/data/image/import_1/db.json
index 9ffdfb2..ca9f44e 100644
--- a/tests/test_hub/data/image/import_1/db.json
+++ b/tests/test_hub/data/image/import_1/db.json
@@ -1,205 +1,205 @@
 {
   "inserts": [
     [
-      "INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)", 
-      {
-        "build_id": 137, 
-        "archive_id": "ARCHIVE_ID", 
-        "type_id": "ARCHIVETYPE", 
-        "checksum": "19a674d997af7098a444b60d7b51cee6", 
-        "filename": "tdl-x86_64.xml", 
-        "checksum_type": 0, 
-        "btype_id": "BTYPEID:image", 
-        "buildroot_id": null, 
-        "id": 1001, 
+      "INSERT INTO archiveinfo (archive_id, btype_id, build_id, buildroot_id, checksum, checksum_type, filename, id, size, type_id) VALUES (%(archive_id)s, %(btype_id)s, %(build_id)s, %(buildroot_id)s, %(checksum)s, %(checksum_type)s, %(filename)s, %(id)s, %(size)s, %(type_id)s)",
+      {
+        "build_id": 137,
+        "archive_id": "ARCHIVE_ID",
+        "type_id": "ARCHIVETYPE",
+        "checksum": "19a674d997af7098a444b60d7b51cee6",
+        "filename": "tdl-x86_64.xml",
+        "checksum_type": 0,
+        "btype_id": "BTYPEID:image",
+        "buildroot_id": null,
+        "id": 1001,
         "size": 36
-      }, 
+      },
       {}
-    ], 
+    ],
     [
-      "INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)", 
+      "INSERT INTO image_archives (arch, archive_id) VALUES (%(arch)s, %(archive_id)s)",
       {
-        "archive_id": 1001, 
+        "archive_id": 1001,
         "arch": "x86_64"
-      }, 
+      },
       {}
-    ], 
+    ],
     [
-      "INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)", 
+      "INSERT INTO archiveinfo (archive_id, btype_id, build_id, buildroot_id, checksum, checksum_type, filename, id, size, type_id) VALUES (%(archive_id)s, %(btype_id)s, %(build_id)s, %(buildroot_id)s, %(checksum)s, %(checksum_type)s, %(filename)s, %(id)s, %(size)s, %(type_id)s)",
       {
-        "build_id": 137, 
-        "archive_id": "ARCHIVE_ID", 
-        "type_id": "ARCHIVETYPE", 
-        "checksum": "a5114a20d790cf17eca1b1115a4546f8", 
-        "filename": "image.ks", 
-        "checksum_type": 0, 
-        "btype_id": "BTYPEID:image", 
-        "buildroot_id": null, 
-        "id": 1002, 
+        "build_id": 137,
+        "archive_id": "ARCHIVE_ID",
+        "type_id": "ARCHIVETYPE",
+        "checksum": "a5114a20d790cf17eca1b1115a4546f8",
+        "filename": "image.ks",
+        "checksum_type": 0,
+        "btype_id": "BTYPEID:image",
+        "buildroot_id": null,
+        "id": 1002,
         "size": 30
-      }, 
+      },
       {}
-    ], 
+    ],
     [
-      "INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)", 
+      "INSERT INTO image_archives (arch, archive_id) VALUES (%(arch)s, %(archive_id)s)",
      {
-        "archive_id": 1002, 
+        "archive_id": 1002,
         "arch": "x86_64"
-      }, 
+      },
       {}
-    ], 
+    ],
     [
-      "INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)", 
+      "INSERT INTO archiveinfo (archive_id, btype_id, build_id, buildroot_id, checksum, checksum_type, filename, id, size, type_id) VALUES (%(archive_id)s, %(btype_id)s, %(build_id)s, %(buildroot_id)s, %(checksum)s, %(checksum_type)s, %(filename)s, %(id)s, %(size)s, %(type_id)s)",
       {
-        "build_id": 137, 
-        "archive_id": "ARCHIVE_ID", 
-        "type_id": "ARCHIVETYPE", 
-        "checksum": "9828cf75d9d17ac8e79e53ed71c6a71c", 
-        "filename": "image-base.ks", 
-        "checksum_type": 0, 
-        "btype_id": "BTYPEID:image", 
-        "buildroot_id": null, 
-        "id": 1003, 
+        "build_id": 137,
+        "archive_id": "ARCHIVE_ID",
+        "type_id": "ARCHIVETYPE",
+        "checksum": "9828cf75d9d17ac8e79e53ed71c6a71c",
+        "filename": "image-base.ks",
+        "checksum_type": 0,
+        "btype_id": "BTYPEID:image",
+        "buildroot_id": null,
+        "id": 1003,
         "size": 35
-      }, 
+      },
       {}
-    ], 
+    ],
     [
-      "INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)", 
+      "INSERT INTO image_archives (arch, archive_id) VALUES (%(arch)s, %(archive_id)s)",
       {
-        "archive_id": 1003, 
+        "archive_id": 1003,
         "arch": "x86_64"
-      }, 
+      },
       {}
-    ], 
+    ],
"archive_id": 1004, "arch": "x86_64" - }, + }, {} - ], + ], [ - "INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)", + "INSERT INTO archiveinfo (archive_id, btype_id, build_id, buildroot_id, checksum, checksum_type, filename, id, size, type_id) VALUES (%(archive_id)s, %(btype_id)s, %(build_id)s, %(buildroot_id)s, %(checksum)s, %(checksum_type)s, %(filename)s, %(id)s, %(size)s, %(type_id)s)", { - "build_id": 137, - "archive_id": "ARCHIVE_ID", - "type_id": "ARCHIVETYPE", - "checksum": "84547200ef5002292ecdd50c62de518e", - "filename": "my-image-7.4.2-2.x86_64.ova", - "checksum_type": 0, - "btype_id": "BTYPEID:image", - "buildroot_id": null, - "id": 1005, + "build_id": 137, + "archive_id": "ARCHIVE_ID", + "type_id": "ARCHIVETYPE", + "checksum": "84547200ef5002292ecdd50c62de518e", + "filename": "my-image-7.4.2-2.x86_64.ova", + "checksum_type": 0, + "btype_id": "BTYPEID:image", + "buildroot_id": null, + "id": 1005, "size": 49 - }, + }, {} - ], + ], [ - "INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)", + "INSERT INTO image_archives (arch, archive_id) VALUES (%(arch)s, %(archive_id)s)", { - "archive_id": 1005, + "archive_id": 1005, "arch": "x86_64" - }, + }, {} - ], + ], [ - "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", + "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", { - "archive_id": 1002, + "archive_id": 1002, "rpm_id": 1000 - }, + }, {} - ], + ], [ - "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", + "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", { - "archive_id": 1002, + "archive_id": 1002, "rpm_id": 1001 - }, + }, {} - ], + ], [ - "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", + "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", { - "archive_id": 1002, + "archive_id": 1002, "rpm_id": 1002 - }, + }, {} - ], + ], [ - "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", + "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", { - "archive_id": 1003, + "archive_id": 1003, "rpm_id": 1000 - }, + }, {} - ], + ], [ - "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", + "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", { - "archive_id": 1003, + "archive_id": 1003, "rpm_id": 1001 - }, + }, {} - ], + ], [ - "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", + "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", { - "archive_id": 1003, + "archive_id": 1003, "rpm_id": 1002 - }, + }, {} - ], + ], [ - "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", + "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", { - "archive_id": 1005, + "archive_id": 1005, "rpm_id": 1000 - }, + }, {} - ], + ], [ - "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", + "INSERT INTO archive_rpm_components (archive_id, 
diff --git a/tests/test_hub/test_get_build_logs.py b/tests/test_hub/test_get_build_logs.py
index c03ff29..30e4d20 100644
--- a/tests/test_hub/test_get_build_logs.py
+++ b/tests/test_hub/test_get_build_logs.py
@@ -18,6 +18,7 @@ class TestGetBuildLogs(unittest.TestCase):
         self.pathinfo = mock.patch('koji.pathinfo').start()
         self.tempdir = tempfile.mkdtemp()
         koji.pathinfo.build_logs.return_value = self.tempdir
+        koji.pathinfo.topdir = '/'
 
     def tearDown(self):
         mock.patch.stopall()

diff --git a/tests/test_hub/test_get_upload_path.py b/tests/test_hub/test_get_upload_path.py
index 374b968..671e657 100644
--- a/tests/test_hub/test_get_upload_path.py
+++ b/tests/test_hub/test_get_upload_path.py
@@ -47,7 +47,7 @@ class TestGetUploadPath(unittest.TestCase):
 
         fullpath = '%s/work/%s' % (self.topdir, reldir)
         os.makedirs(fullpath)
-        with open('{0}/.user'.format(fullpath), 'wb') as f:
+        with open('{0}/.user'.format(fullpath), 'wt') as f:
             f.write('1')
 
         with self.assertRaises(GenericError):
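[The mode change above ('wb' to 'wt') is the minimal py3 fix for this fixture: the test writes the str '1', and a binary-mode file on Python 3 only accepts bytes. The failing and working variants side by side:

    with open('/tmp/.user', 'wb') as f:
        f.write('1')    # py3: TypeError, a bytes-like object is required

    with open('/tmp/.user', 'wt') as f:
        f.write('1')    # fine on both py2 and py3
]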
diff --git a/tests/test_hub/test_import_build.py b/tests/test_hub/test_import_build.py
index 22f1186..2422d9a 100644
--- a/tests/test_hub/test_import_build.py
+++ b/tests/test_hub/test_import_build.py
@@ -76,18 +76,18 @@ class TestImportRPM(unittest.TestCase):
         _singleValue.return_value = 9876
         kojihub.import_rpm(self.filename)
         fields = [
-            'build_id',
-            'name',
             'arch',
+            'build_id',
+            'buildroot_id',
             'buildtime',
-            'payloadhash',
             'epoch',
-            'version',
-            'buildroot_id',
-            'release',
             'external_repo_id',
             'id',
+            'name',
+            'payloadhash',
+            'release',
             'size',
+            'version',
         ]
         statement = 'INSERT INTO rpminfo (%s) VALUES (%s)' % (
             ", ".join(fields),
@@ -135,18 +135,18 @@ class TestImportRPM(unittest.TestCase):
         _singleValue.return_value = 9876
         kojihub.import_rpm(self.src_filename)
         fields = [
-            'build_id',
-            'name',
             'arch',
+            'build_id',
+            'buildroot_id',
             'buildtime',
-            'payloadhash',
             'epoch',
-            'version',
-            'buildroot_id',
-            'release',
             'external_repo_id',
             'id',
+            'name',
+            'payloadhash',
+            'release',
             'size',
+            'version',
         ]
         statement = 'INSERT INTO rpminfo (%s) VALUES (%s)' % (
             ", ".join(fields),
@@ -253,19 +253,19 @@ class TestImportBuild(unittest.TestCase):
 
         kojihub.import_build(self.src_filename, [self.filename])
 
         fields = [
-            'task_id',
+            'completion_time',
+            'epoch',
             'extra',
+            'id',
+            'owner',
+            'pkg_id',
+            'release',
+            'source',
             'start_time',
-            'epoch',
-            'completion_time',
             'state',
+            'task_id',
             'version',
-            'source',
             'volume_id',
-            'owner',
-            'release',
-            'pkg_id',
-            'id',
         ]
         statement = 'INSERT INTO build (%s) VALUES (%s)' % (
             ", ".join(fields),

diff --git a/tests/test_hub/test_list_archives.py b/tests/test_hub/test_list_archives.py
index 1f529ee..60694af 100644
--- a/tests/test_hub/test_list_archives.py
+++ b/tests/test_hub/test_list_archives.py
@@ -141,8 +141,8 @@ class TestListArchives(DBQueryTestCase):
                                  joins=['archivetypes on archiveinfo.type_id = archivetypes.id',
                                         'btype ON archiveinfo.btype_id = btype.id',
                                         'maven_archives ON archiveinfo.id = maven_archives.archive_id'],
-                                 clauses=['maven_archives.group_id = %(group_id)s',
-                                          'maven_archives.artifact_id = %(artifact_id)s',
+                                 clauses=['maven_archives.artifact_id = %(artifact_id)s',
+                                          'maven_archives.group_id = %(group_id)s',
                                           'maven_archives.version = %(version)s'],
                                  values={'group_id': 'gid',
                                          'artifact_id': 'aid',
@@ -171,13 +171,15 @@ class TestListArchives(DBQueryTestCase):
                                             'platforms': 'all',
                                             'flags': ['A', 'B']})
         self.assertLastQueryEqual(tables=['archiveinfo'],
-                                 joins=['archivetypes on archiveinfo.type_id = archivetypes.id',
+                                 joins=sorted([
+                                        'archivetypes on archiveinfo.type_id = archivetypes.id',
                                         'btype ON archiveinfo.btype_id = btype.id',
-                                        'win_archives ON archiveinfo.id = win_archives.archive_id'],
-                                 clauses=['win_archives.relpath = %(relpath)s',
+                                        'win_archives ON archiveinfo.id = win_archives.archive_id']),
+                                 clauses=sorted([
+                                          'win_archives.relpath = %(relpath)s',
                                           r"platforms ~ E'\\mall\\M'",
                                           r"flags ~ E'\\mA\\M'",
-                                          r"flags ~ E'\\mB\\M'"],
+                                          r"flags ~ E'\\mB\\M'"]),
                                  values={'relpath': 'somerelpath'},
                                  colsByAlias={'relpath': 'win_archives.relpath',
                                               'platforms': 'win_archives.platforms',

diff --git a/tests/test_hub/test_list_channels.py b/tests/test_hub/test_list_channels.py
index 595e05a..78149e7 100644
--- a/tests/test_hub/test_list_channels.py
+++ b/tests/test_hub/test_list_channels.py
@@ -37,10 +37,10 @@ class TestListChannels(unittest.TestCase):
         self.assertEqual(len(self.queries), 1)
         query = self.queries[0]
         self.assertEqual(query.tables, ['channels'])
-        self.assertEqual(query.aliases, ('name', 'id'))
+        self.assertEqual(query.aliases, ['id', 'name'])
         self.assertEqual(query.joins, None)
         self.assertEqual(query.values, {})
-        self.assertEqual(query.columns, ('channels.name', 'channels.id'))
+        self.assertEqual(query.columns, ['channels.id', 'channels.name'])
         self.assertEqual(query.clauses, None)
 
     def test_host(self):
@@ -54,10 +54,10 @@ class TestListChannels(unittest.TestCase):
             'host_channels.host_id = %(host_id)s'
         ]
         self.assertEqual(query.tables, ['host_channels'])
-        self.assertEqual(query.aliases, ('name', 'id'))
+        self.assertEqual(query.aliases, ['id', 'name'])
         self.assertEqual(query.joins, joins)
         self.assertEqual(query.values, {'host_id': 1234})
-        self.assertEqual(query.columns, ('channels.name', 'channels.id'))
+        self.assertEqual(query.columns, ['channels.id', 'channels.name'])
         self.assertEqual(query.clauses, clauses)
 
     def test_host_and_event(self):
@@ -71,10 +71,10 @@ class TestListChannels(unittest.TestCase):
             'host_channels.host_id = %(host_id)s',
         ]
         self.assertEqual(query.tables, ['host_channels'])
-        self.assertEqual(query.aliases, ('name', 'id'))
+        self.assertEqual(query.aliases, ['id', 'name'])
         self.assertEqual(query.joins, joins)
         self.assertEqual(query.values, {'host_id': 1234})
-        self.assertEqual(query.columns, ('channels.name', 'channels.id'))
+        self.assertEqual(query.columns, ['channels.id', 'channels.name'])
         self.assertEqual(query.clauses, clauses)
 
     def test_event_only(self):
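[These assertions change from tuples to sorted lists because QueryProcessor now normalizes its inputs (see the alias_table hunk in kojihub.py above). When both columns and aliases are given they must be reordered together so the pairing survives; sketched:

    aliases = ('name', 'id')
    columns = ('channels.name', 'channels.id')

    alias_table = sorted(zip(aliases, columns))   # sort by alias, keep pairs
    aliases = [a for a, c in alias_table]         # ['id', 'name']
    columns = [c for a, c in alias_table]         # ['channels.id', 'channels.name']
    colsByAlias = dict(alias_table)
]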
diff --git a/tests/test_hub/test_list_hosts.py b/tests/test_hub/test_list_hosts.py
index 60161e7..a2b8ecd 100644
--- a/tests/test_hub/test_list_hosts.py
+++ b/tests/test_hub/test_list_hosts.py
@@ -59,9 +59,9 @@ class TestListHosts(unittest.TestCase):
         self.assertEqual(query.joins, ['host ON host.id = host_config.host_id',
                                        'host_channels ON host.id = host_channels.host_id'])
         self.assertEqual(query.clauses, [
-            'host_config.active IS TRUE',
-            'host_channels.channel_id = %(channelID)i',
             'host_channels.active IS TRUE',
+            'host_channels.channel_id = %(channelID)i',
+            'host_config.active IS TRUE',
         ])
 
     def test_list_hosts_single_arch(self):
@@ -71,7 +71,8 @@ class TestListHosts(unittest.TestCase):
         query = self.queries[0]
         self.assertEqual(query.tables, ['host_config'])
         self.assertEqual(query.joins, ['host ON host.id = host_config.host_id'])
-        self.assertEqual(query.clauses, ['host_config.active IS TRUE',r"""(arches ~ E'\\mx86_64\\M')"""])
+        self.assertEqual(query.clauses, [r"""(arches ~ E'\\mx86_64\\M')""",
+                                         'host_config.active IS TRUE'])
 
     def test_list_hosts_multi_arch(self):
         self.exports.listHosts(arches=['x86_64', 's390'])
@@ -80,7 +81,9 @@ class TestListHosts(unittest.TestCase):
         query = self.queries[0]
         self.assertEqual(query.tables, ['host_config'])
         self.assertEqual(query.joins, ['host ON host.id = host_config.host_id'])
-        self.assertEqual(query.clauses, ['host_config.active IS TRUE',r"""(arches ~ E'\\mx86_64\\M' OR arches ~ E'\\ms390\\M')"""])
+        self.assertEqual(query.clauses, [
+            r"""(arches ~ E'\\mx86_64\\M' OR arches ~ E'\\ms390\\M')""",
+            'host_config.active IS TRUE'])
 
     def test_list_hosts_bad_arch(self):
         with self.assertRaises(koji.GenericError):
@@ -111,7 +114,7 @@ class TestListHosts(unittest.TestCase):
         query = self.queries[0]
         self.assertEqual(query.tables, ['host_config'])
         self.assertEqual(query.joins, ['host ON host.id = host_config.host_id'])
-        self.assertEqual(query.clauses, ['host_config.active IS TRUE','enabled IS TRUE'])
+        self.assertEqual(query.clauses, ['enabled IS TRUE', 'host_config.active IS TRUE'])
 
     def test_list_hosts_disabled(self):
         self.exports.listHosts(enabled=0)
@@ -120,4 +123,4 @@ class TestListHosts(unittest.TestCase):
         query = self.queries[0]
         self.assertEqual(query.tables, ['host_config'])
         self.assertEqual(query.joins, ['host ON host.id = host_config.host_id'])
-        self.assertEqual(query.clauses, ['host_config.active IS TRUE','enabled IS FALSE'])
+        self.assertEqual(query.clauses, ['enabled IS FALSE', 'host_config.active IS TRUE'])
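[These hub tests never hit a database: they assert on the attributes of captured QueryProcessor objects (self.queries). A hedged sketch of the capture pattern such a setUp typically uses; the names are illustrative and the real helpers live in the individual test modules:

    import mock
    import kojihub

    QP = kojihub.QueryProcessor          # keep the real class before patching

    class QueryCaptureMixin(object):
        def setUp(self):
            self.queries = []
            mock.patch('kojihub.QueryProcessor',
                       side_effect=self.getQuery).start()

        def tearDown(self):
            mock.patch.stopall()

        def getQuery(self, *args, **kwargs):
            query = QP(*args, **kwargs)       # build the real object...
            query.execute = mock.MagicMock()  # ...but never run SQL
            self.queries.append(query)
            return query
]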
diff --git a/tests/test_hub/test_notifications.py b/tests/test_hub/test_notifications.py
index 0cea57c..d45c138 100644
--- a/tests/test_hub/test_notifications.py
+++ b/tests/test_hub/test_notifications.py
@@ -72,13 +72,13 @@ class TestNotifications(unittest.TestCase):
         # only query to watchers
         self.assertEqual(len(self.queries), 1)
         q = self.queries[0]
-        self.assertEqual(q.columns, ('email',))
+        self.assertEqual(q.columns, ['email'])
         self.assertEqual(q.tables, ['build_notifications'])
-        self.assertEqual(q.clauses, [ 'status = %(users_status)i',
-                                      'usertype IN %(users_usertypes)s',
-                                      'package_id IS NULL',
+        self.assertEqual(q.clauses, ['package_id IS NULL',
+                                     'status = %(users_status)i',
+                                     'success_only = FALSE',
                                      'tag_id IS NULL',
-                                      'success_only = FALSE'])
+                                     'usertype IN %(users_usertypes)s'])
         self.assertEqual(q.joins, ['JOIN users ON build_notifications.user_id = users.id'])
         self.assertEqual(q.values['state'], state)
         self.assertEqual(q.values['build'], build)
@@ -96,13 +96,13 @@ class TestNotifications(unittest.TestCase):
         # there should be only query to watchers
         self.assertEqual(len(self.queries), 1)
         q = self.queries[0]
-        self.assertEqual(q.columns, ('email',))
+        self.assertEqual(q.columns, ['email'])
         self.assertEqual(q.tables, ['build_notifications'])
-        self.assertEqual(q.clauses, ['status = %(users_status)i',
-                                     'usertype IN %(users_usertypes)s',
-                                     'package_id = %(package_id)i OR package_id IS NULL',
+        self.assertEqual(q.clauses, ['package_id = %(package_id)i OR package_id IS NULL',
+                                     'status = %(users_status)i',
+                                     'success_only = FALSE',
                                      'tag_id IS NULL',
-                                     'success_only = FALSE'])
+                                     'usertype IN %(users_usertypes)s'])
         self.assertEqual(q.joins, ['JOIN users ON build_notifications.user_id = users.id'])
         self.assertEqual(q.values['package_id'], build['package_id'])
         self.assertEqual(q.values['state'], state)
@@ -134,19 +134,19 @@ class TestNotifications(unittest.TestCase):
         }
 
         emails = kojihub.get_notification_recipients(build, tag_id, state)
-        self.assertEqual(emails, ['owner_name@test.domain.com', 'pkg_owner_name@test.domain.com'])
+        self.assertEqual(sorted(emails), ['owner_name@test.domain.com', 'pkg_owner_name@test.domain.com'])
 
         # there should be only query to watchers
         self.assertEqual(len(self.queries), 1)
         q = self.queries[0]
-        self.assertEqual(q.columns, ('email',))
+        self.assertEqual(q.columns, ['email'])
         self.assertEqual(q.tables, ['build_notifications'])
-        self.assertEqual(q.clauses, ['status = %(users_status)i',
-                                     'usertype IN %(users_usertypes)s',
-                                     'package_id = %(package_id)i OR package_id IS NULL',
+        self.assertEqual(q.clauses, ['package_id = %(package_id)i OR package_id IS NULL',
+                                     'status = %(users_status)i',
+                                     'success_only = FALSE',
                                      'tag_id = %(tag_id)i OR tag_id IS NULL',
-                                     'success_only = FALSE'])
+                                     'usertype IN %(users_usertypes)s'])
         self.assertEqual(q.joins, ['JOIN users ON build_notifications.user_id = users.id'])
         self.assertEqual(q.values['package_id'], build['package_id'])
         self.assertEqual(q.values['state'], state)

diff --git a/tests/test_hub/test_rpmdiff.py b/tests/test_hub/test_rpmdiff.py
index 16a784a..b69e7f5 100644
--- a/tests/test_hub/test_rpmdiff.py
+++ b/tests/test_hub/test_rpmdiff.py
@@ -90,7 +90,7 @@ class TestRPMDiff(unittest.TestCase):
         rpm = os.path.join(data_path, 'test-pkg-1.0.0-1.el7.noarch.rpm')
 
         # dummy file info
-        defattr = [19L, 33188, 1531970408, 0, 0, 2, 1, -1, -1, 'root', 'root', '02d2c91b']
+        defattr = [19, 33188, 1531970408, 0, 0, 2, 1, -1, -1, 'root', 'root', '02d2c91b']
 
         rpm_dict_old = {'a_file': defattr }
@@ -113,7 +113,7 @@ class TestRPMDiff(unittest.TestCase):
             self.assertEqual(diff.textdiff(), textdiff if token not in opt else '')
 
         # case 1 size diffrerent
-        check_diff_result('S', 0, 99L, "S.......... a_file")
+        check_diff_result('S', 0, 99, "S.......... a_file")
 
         # case 2 mode different
         check_diff_result('M', 1, 22188, ".M......... a_file")

diff --git a/tests/test_hub/test_write_maven_repo_metadata.py b/tests/test_hub/test_write_maven_repo_metadata.py
index d990e68..6f90385 100644
--- a/tests/test_hub/test_write_maven_repo_metadata.py
+++ b/tests/test_hub/test_write_maven_repo_metadata.py
@@ -7,7 +7,6 @@ try:
 except ImportError:
     import unittest
 
-import koji
 from kojihub import _write_maven_repo_metadata
 
 class TestWriteMavenRepoMetadata(unittest.TestCase):
@@ -39,7 +38,7 @@ class TestWriteMavenRepoMetadata(unittest.TestCase):
         openf_mock.assert_called_with(
             os.path.join(destdir, 'maven-metadata.xml'), 'w')
 
-        handle = openf_mock()
+        handle = openf_mock().__enter__()
 
         expected = """\