From b59b41b2fc9ddd843e8616c5b658cc1098cb854d Mon Sep 17 00:00:00 2001 From: Yu Ming Zhu Date: Jun 23 2020 09:28:13 +0000 Subject: [PATCH 1/5] web: use sha1 for token generation instead of md5(disabled by FIPS) fixes: #2291 --- diff --git a/www/lib/kojiweb/util.py b/www/lib/kojiweb/util.py index d90f5cc..093c9cf 100644 --- a/www/lib/kojiweb/util.py +++ b/www/lib/kojiweb/util.py @@ -173,7 +173,7 @@ def _genToken(environ, tstamp=None): tstamp = _truncTime() value = user + str(tstamp) + environ['koji.options']['Secret'].value value = value.encode('utf-8') - return hashlib.md5(value).hexdigest()[-8:] + return hashlib.sha1(value).hexdigest()[-8:] def _getValidTokens(environ): From ad6b38707bd41bf650a37e3bb53960a92091955e Mon Sep 17 00:00:00 2001 From: Yu Ming Zhu Date: Jun 23 2020 09:28:13 +0000 Subject: [PATCH 2/5] a wrapper ignoring FIPS for hashlib.md5 --- diff --git a/cli/koji_cli/commands.py b/cli/koji_cli/commands.py index 6577677..805a7eb 100644 --- a/cli/koji_cli/commands.py +++ b/cli/koji_cli/commands.py @@ -2,7 +2,6 @@ from __future__ import absolute_import, division import ast import fnmatch -import hashlib import itertools import json import logging @@ -24,7 +23,7 @@ import six.moves.xmlrpc_client from six.moves import filter, map, range, zip import koji -from koji.util import base64encode, to_list +from koji.util import base64encode, md5_constructor, to_list from koji_cli.lib import ( _, _list_tasks, @@ -1498,7 +1497,7 @@ def handle_import_sig(goptions, session, args): previous = session.queryRPMSigs(rpm_id=rinfo['id'], sigkey=sigkey) assert len(previous) <= 1 if previous: - sighash = hashlib.md5(sighdr).hexdigest() + sighash = md5_constructor(sighdr).hexdigest() if previous[0]['sighash'] == sighash: print(_("Signature already imported: %s") % path) continue diff --git a/cli/koji_cli/lib.py b/cli/koji_cli/lib.py index 325ad94..a85e537 100644 --- a/cli/koji_cli/lib.py +++ b/cli/koji_cli/lib.py @@ -18,7 +18,7 @@ from six.moves import range import koji # import parse_arches to current namespace for backward compatibility from koji import parse_arches -from koji.util import to_list +from koji.util import md5_constructor, to_list try: import krbV @@ -612,7 +612,7 @@ def download_archive(build, archive, topurl, quiet=False, noprogress=False): # check checksum/checksum_type if archive['checksum_type'] == koji.CHECKSUM_TYPES['md5']: - hash = hashlib.md5() + hash = md5_constructor() elif archive['checksum_type'] == koji.CHECKSUM_TYPES['sha1']: hash = hashlib.sha1() elif archive['checksum_type'] == koji.CHECKSUM_TYPES['sha256']: diff --git a/hub/kojihub.py b/hub/kojihub.py index 8a5a9bb..3cc5233 100644 --- a/hub/kojihub.py +++ b/hub/kojihub.py @@ -64,6 +64,7 @@ from koji.util import ( decode_bytes, dslice, joinpath, + md5_constructor, move_and_symlink, multi_fnmatch, safer_move, @@ -6635,7 +6636,7 @@ class CG_Importer(object): # until we change the way we handle checksums, we have to limit this to md5 raise koji.GenericError("Unsupported checksum type: %(checksum_type)s" % fileinfo) with open(path, 'rb') as fp: - m = hashlib.md5() + m = md5_constructor() while True: contents = fp.read(8192) if not contents: @@ -7220,7 +7221,7 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No # trust values computed on hub (CG_Importer.prep_outputs) if not fileinfo or not fileinfo.get('hub.checked_md5'): with open(filepath, 'rb') as archivefp: - m = hashlib.md5() + m = md5_constructor() while True: contents = archivefp.read(8192) if not contents: @@ -7361,7 +7362,7 @@ def 
_generate_maven_metadata(mavendir): continue if not os.path.isfile('%s/%s' % (mavendir, mavenfile)): continue - for ext, sum_constr in (('.md5', hashlib.md5), ('.sha1', hashlib.sha1)): + for ext, sum_constr in (('.md5', md5_constructor), ('.sha1', hashlib.sha1)): sumfile = mavenfile + ext if sumfile not in mavenfiles: sum = sum_constr() @@ -7411,7 +7412,7 @@ def add_rpm_sig(an_rpm, sighdr): # we use the sigkey='' to represent unsigned in the db (so that uniqueness works) else: sigkey = koji.get_sigpacket_key_id(sigkey) - sighash = hashlib.md5(sighdr).hexdigest() + sighash = md5_constructor(sighdr).hexdigest() rpm_id = rinfo['id'] # - db entry q = """SELECT sighash FROM rpmsigs WHERE rpm_id=%(rpm_id)i AND sigkey=%(sigkey)s""" @@ -14628,7 +14629,7 @@ def get_upload_path(reldir, name, create=False, volume=None): def get_verify_class(verify): if verify == 'md5': - return hashlib.md5 + return md5_constructor elif verify == 'adler32': return koji.util.adler32_constructor elif verify: diff --git a/koji/__init__.py b/koji/__init__.py index ed4c75a..bbd38f8 100644 --- a/koji/__init__.py +++ b/koji/__init__.py @@ -27,7 +27,6 @@ from __future__ import absolute_import, division import base64 import datetime import errno -import hashlib import imp import logging import logging.handlers @@ -3108,7 +3107,7 @@ class ClientSession(object): fo = open(localfile, "rb") # specify bufsize? totalsize = os.path.getsize(localfile) ofs = 0 - md5sum = hashlib.md5() + md5sum = util.md5_constructor() debug = self.opts.get('debug', False) if callback: callback(0, totalsize, 0, 0, 0) @@ -3125,7 +3124,7 @@ class ClientSession(object): sz = ofs else: offset = ofs - digest = hashlib.md5(contents).hexdigest() + digest = util.md5_constructor(contents).hexdigest() sz = size del contents tries = 0 diff --git a/koji/daemon.py b/koji/daemon.py index 9a77f20..08611ab 100644 --- a/koji/daemon.py +++ b/koji/daemon.py @@ -23,7 +23,6 @@ from __future__ import absolute_import, division import errno -import hashlib import logging import os import signal @@ -44,6 +43,7 @@ from koji.util import ( adler32_constructor, base64encode, dslice, + md5_constructor, parseStatus, to_list, joinpath, @@ -69,7 +69,7 @@ def incremental_upload(session, fname, fd, path, retries=5, logger=None): break data = base64encode(contents) - digest = hashlib.md5(contents).hexdigest() + digest = md5_constructor(contents).hexdigest() del contents tries = 0 diff --git a/koji/util.py b/koji/util.py index f2f4c34..803ec36 100644 --- a/koji/util.py +++ b/koji/util.py @@ -45,6 +45,17 @@ import koji from koji.xmlrpcplus import DateTime +# BEGIN kojikamid dup # + +def md5_constructor(*args, **kwargs): + if hasattr(hashlib._hashlib, 'get_fips_mode') and hashlib._hashlib.get_fips_mode(): + # do not care about FIPS + kwargs['usedforsecurity'] = False + return hashlib.md5(*args, **kwargs) + +# END kojikamid dup # + + # imported from kojiweb and kojihub def deprecated(message): """Print deprecation warning""" @@ -583,7 +594,7 @@ def check_sigmd5(filename): f.seek(o) # compute md5 of rest of file - md5 = hashlib.md5() + md5 = md5_constructor() while True: d = f.read(1024**2) if not d: diff --git a/vm/fix_kojikamid.sh b/vm/fix_kojikamid.sh index f0063a7..12c3acd 100755 --- a/vm/fix_kojikamid.sh +++ b/vm/fix_kojikamid.sh @@ -2,7 +2,7 @@ awk '/^# INSERT kojikamid dup #/ {exit} {print $0}' kojikamid.py -for fn in ../koji/__init__.py ../koji/daemon.py +for fn in ../koji/__init__.py ../koji/daemon.py ../koji/util.py do awk '/^# END kojikamid dup #/ {p=0} p {print $0} /^# BEGIN 
kojikamid dup #/ {p=1}' $fn done diff --git a/vm/kojikamid.py b/vm/kojikamid.py index 7bd9dec..96046a7 100755 --- a/vm/kojikamid.py +++ b/vm/kojikamid.py @@ -333,7 +333,7 @@ class WindowsBuild(object): elif checksum_type == 'sha256': checksum = hashlib.sha256() elif checksum_type == 'md5': - checksum = hashlib.md5() + checksum = md5_constructor.md5() # noqa: F821 else: raise BuildError('Unknown checksum type %s for %s' % ( # noqa: F821 checksum_type, diff --git a/vm/kojivmd b/vm/kojivmd index de06624..6ef6c8f 100755 --- a/vm/kojivmd +++ b/vm/kojivmd @@ -795,7 +795,7 @@ class VMExecTask(BaseTaskHandler): if algo == 'sha1': sum = hashlib.sha1() elif algo == 'md5': - sum = hashlib.md5() + sum = koji.util.md5_constructor() elif algo == 'sha256': sum == hashlib.sha256() else: From f2f26917739a3d3b20bb5b8ebe2ad3d8376fc499 Mon Sep 17 00:00:00 2001 From: Tomas Kopecek Date: Jun 23 2020 09:28:13 +0000 Subject: [PATCH 3/5] replace md5 with sha256 --- diff --git a/hub/kojihub.py b/hub/kojihub.py index 3cc5233..bc0b4f8 100644 --- a/hub/kojihub.py +++ b/hub/kojihub.py @@ -6631,22 +6631,18 @@ class CG_Importer(object): (filesize, fileinfo['filename'], fileinfo['filesize'])) # checksum - if fileinfo['checksum_type'] != 'md5': - # XXX - # until we change the way we handle checksums, we have to limit this to md5 - raise koji.GenericError("Unsupported checksum type: %(checksum_type)s" % fileinfo) with open(path, 'rb') as fp: - m = md5_constructor() + chksum = get_verify_class(fileinfo['checksum_type'])() while True: contents = fp.read(8192) if not contents: break - m.update(contents) - if fileinfo['checksum'] != m.hexdigest(): + chksum.update(contents) + if fileinfo['checksum'] != chksum.hexdigest(): raise koji.GenericError("File checksum mismatch for %s: %s != %s" % (fileinfo['filename'], fileinfo['checksum'], - m.hexdigest())) - fileinfo['hub.checked_md5'] = True + chksum.hexdigest())) + fileinfo['hub.checked_hash'] = True if fileinfo['buildroot_id'] not in self.br_prep: raise koji.GenericError("Missing buildroot metadata for id %(buildroot_id)r" % @@ -7208,9 +7204,7 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No archiveinfo['filename'] = filename archiveinfo['size'] = fileinfo['filesize'] archiveinfo['checksum'] = fileinfo['checksum'] - if fileinfo['checksum_type'] != 'md5': - # XXX - # until we change the way we handle checksums, we have to limit this to md5 + if fileinfo['checksum_type'] not in ('md5', 'sha256'): raise koji.GenericError("Unsupported checksum type: %(checksum_type)s" % fileinfo) archiveinfo['checksum_type'] = koji.CHECKSUM_TYPES[fileinfo['checksum_type']] archiveinfo['metadata_only'] = True @@ -7219,28 +7213,26 @@ def import_archive_internal(filepath, buildinfo, type, typeInfo, buildroot_id=No archiveinfo['filename'] = filename archiveinfo['size'] = os.path.getsize(filepath) # trust values computed on hub (CG_Importer.prep_outputs) - if not fileinfo or not fileinfo.get('hub.checked_md5'): + if not fileinfo or not fileinfo.get('hub.checked_hash'): with open(filepath, 'rb') as archivefp: - m = md5_constructor() + chksum = get_verify_class('sha256')() while True: contents = archivefp.read(8192) if not contents: break - m.update(contents) - archiveinfo['checksum'] = m.hexdigest() + chksum.update(contents) + archiveinfo['checksum'] = chksum.hexdigest() + archiveinfo['checksum_type'] = koji.CHECKSUM_TYPES['sha256'] else: archiveinfo['checksum'] = fileinfo['checksum'] - archiveinfo['checksum_type'] = koji.CHECKSUM_TYPES['md5'] + archiveinfo['checksum_type'] 
= fileinfo['checksum_type'] if fileinfo: # check against metadata if archiveinfo['size'] != fileinfo['filesize']: raise koji.GenericError("File size mismatch for %s: %s != %s" % (filename, archiveinfo['size'], fileinfo['filesize'])) - if fileinfo['checksum_type'] != 'md5': - # XXX - # until we change the way we handle checksums, we have to limit this to md5 - raise koji.GenericError("Unsupported checksum type: %(checksum_type)s" % fileinfo) - if archiveinfo['checksum'] != fileinfo['checksum']: + if (archiveinfo['checksum'] != fileinfo['checksum'] or + archiveinfo['checksum_type'] != fileinfo['checksum_type']): raise koji.GenericError("File checksum mismatch for %s: %s != %s" % (filename, archiveinfo['checksum'], fileinfo['checksum'])) archivetype = get_archive_type(filename, strict=True) @@ -10322,13 +10314,14 @@ class RootExports(object): def uploadFile(self, path, name, size, md5sum, offset, data, volume=None): """upload file to the hub - Files can be uploaded in chunks, if so the md5 and size describe the + Files can be uploaded in chunks, if so the hash and size describe the chunk rather than the whole file. :param str path: the relative path to upload to :param str name: the name of the file :param int size: size of contents (bytes) - :param str md5: md5sum (hex digest) of contents + :param str md5sum: md5sum (hex digest) of contents or tuple (hash_type, digest) + md5sum name is misleading, but it is here for backwas compatibility :param str data: base64 encoded file contents :param int offset: The offset indicates where the chunk belongs. The special offset -1 is used to indicate the final @@ -10345,11 +10338,11 @@ class RootExports(object): if isinstance(md5sum, str): # this case is for backwards compatibility verify = "md5" - digest = md5sum - elif md5sum is None: + digest = hash + elif hash is None: verify = None else: - verify, digest = md5sum + verify, digest = hash sum_cls = get_verify_class(verify) if offset != -1: if size is not None: @@ -10447,14 +10440,13 @@ class RootExports(object): data['size'] = st.st_size data['mtime'] = st.st_mtime if verify: - sum_cls = get_verify_class(verify) + chksum = get_verify_class(verify)() if tail is not None: if tail < 0: raise koji.GenericError("invalid tail value: %r" % tail) offset = max(st.st_size - tail, 0) os.lseek(fd, offset, 0) length = 0 - chksum = sum_cls() chunk = os.read(fd, 8192) while chunk: length += len(chunk) @@ -14632,6 +14624,8 @@ def get_verify_class(verify): return md5_constructor elif verify == 'adler32': return koji.util.adler32_constructor + elif verify == 'sha256': + return hashlib.sha256 elif verify: raise koji.GenericError("Unsupported verify type: %s" % verify) else: @@ -14659,9 +14653,8 @@ def handle_upload(environ): raise koji.GenericError("destination not a file: %s" % fn) if offset == 0 and not overwrite: raise koji.GenericError("upload path exists: %s" % fn) - sum_cls = get_verify_class(verify) + chksum = get_verify_class(verify)() size = 0 - chksum = sum_cls() inf = environ['wsgi.input'] fd = os.open(fn, os.O_RDWR | os.O_CREAT, 0o666) try: diff --git a/koji/__init__.py b/koji/__init__.py index bbd38f8..b95e2d9 100644 --- a/koji/__init__.py +++ b/koji/__init__.py @@ -27,6 +27,7 @@ from __future__ import absolute_import, division import base64 import datetime import errno +import hashlib import imp import logging import logging.handlers @@ -3107,24 +3108,24 @@ class ClientSession(object): fo = open(localfile, "rb") # specify bufsize? 
            totalsize = os.path.getsize(localfile)
             ofs = 0
-            md5sum = util.md5_constructor()
+            sha256sum = hashlib.sha256()
             debug = self.opts.get('debug', False)
             if callback:
                 callback(0, totalsize, 0, 0, 0)
             while True:
                 lap = time.time()
                 contents = fo.read(blocksize)
-                md5sum.update(contents)
+                sha256sum.update(contents)
                 size = len(contents)
                 data = util.base64encode(contents)
                 if size == 0:
                     # end of file, use offset = -1 to finalize upload
                     offset = -1
-                    digest = md5sum.hexdigest()
+                    digest = sha256sum.hexdigest()
                     sz = ofs
                 else:
                     offset = ofs
-                    digest = util.md5_constructor(contents).hexdigest()
+                    digest = hashlib.sha256(contents).hexdigest()
                     sz = size
                 del contents
                 tries = 0
@@ -3132,7 +3133,8 @@ class ClientSession(object):
                     if debug:
                         self.logger.debug("uploadFile(%r,%r,%r,%r,%r,...)"
                                           % (path, name, sz, digest, offset))
-                    if self.callMethod('uploadFile', path, name, sz, digest, offset, data, **volopts):
+                    if self.callMethod('uploadFile', path, name, sz, ("sha256", digest),
+                                       offset, data, **volopts):
                         break
                     if tries <= retries:
                         tries += 1
diff --git a/koji/daemon.py b/koji/daemon.py
index 08611ab..7897713 100644
--- a/koji/daemon.py
+++ b/koji/daemon.py
@@ -23,6 +23,7 @@ from __future__ import absolute_import, division
 
 import errno
+import hashlib
 import logging
 import os
 import signal
@@ -43,7 +44,6 @@ from koji.util import (
     adler32_constructor,
     base64encode,
     dslice,
-    md5_constructor,
     parseStatus,
     to_list,
     joinpath,
@@ -69,12 +69,12 @@ def incremental_upload(session, fname, fd, path, retries=5, logger=None):
             break
 
         data = base64encode(contents)
-        digest = md5_constructor(contents).hexdigest()
+        digest = hashlib.sha256(contents).hexdigest()
         del contents
 
         tries = 0
         while True:
-            if session.uploadFile(path, fname, size, digest, offset, data):
+            if session.uploadFile(path, fname, size, ("sha256", digest), offset, data):
                 break
 
             if tries <= retries:
diff --git a/koji/util.py b/koji/util.py
index 803ec36..1d4da3b 100644
--- a/koji/util.py
+++ b/koji/util.py
@@ -49,7 +49,8 @@ from koji.xmlrpcplus import DateTime
 
 def md5_constructor(*args, **kwargs):
     if hasattr(hashlib._hashlib, 'get_fips_mode') and hashlib._hashlib.get_fips_mode():
-        # do not care about FIPS
+        # md5 is still needed for signatures and older stored hashes, so bypass FIPS
+        # even though some of these uses are security-related
         kwargs['usedforsecurity'] = False
     return hashlib.md5(*args, **kwargs)
 
diff --git a/tests/test_hub/data/image/import_1/db.json b/tests/test_hub/data/image/import_1/db.json
index 1cf84da..e29bda8 100644
--- a/tests/test_hub/data/image/import_1/db.json
+++ b/tests/test_hub/data/image/import_1/db.json
@@ -6,9 +6,9 @@
     "build_id": 137,
     "archive_id": "ARCHIVE_ID",
     "type_id": "ARCHIVETYPE",
-    "checksum": "19a674d997af7098a444b60d7b51cee6",
+    "checksum": "ca9dd08a0b9f81b209c3ac768a7d1ca27973cfd920095e2dc3df5159f752039e",
     "filename": "tdl-x86_64.xml",
-    "checksum_type": 0,
+    "checksum_type": 2,
     "btype_id": "BTYPEID:image",
     "buildroot_id": null,
     "id": 1001,
@@ -30,9 +30,9 @@
     "build_id": 137,
     "archive_id": "ARCHIVE_ID",
     "type_id": "ARCHIVETYPE",
-    "checksum": "a5114a20d790cf17eca1b1115a4546f8",
+    "checksum": "4083a6838c1b6895df27a69373f4c527a9722c045bccc08efe064f105d566c77",
     "filename": "image.ks",
-    "checksum_type": 0,
+    "checksum_type": 2,
     "btype_id": "BTYPEID:image",
     "buildroot_id": null,
     "id": 1002,
@@ -54,9 +54,9 @@
     "build_id": 137,
     "archive_id": "ARCHIVE_ID",
     "type_id": "ARCHIVETYPE",
-    "checksum": "9828cf75d9d17ac8e79e53ed71c6a71c",
+    "checksum": "963a4396be7072012370db407b9ea3633b09dbe45926bb2ef912a86baac1d7b7",
     "filename": "image-base.ks",
-
"checksum_type": 0, + "checksum_type": 2, "btype_id": "BTYPEID:image", "buildroot_id": null, "id": 1003, @@ -78,9 +78,9 @@ "build_id": 137, "archive_id": "ARCHIVE_ID", "type_id": "ARCHIVETYPE", - "checksum": "f601c0f647d7cdd4c92aa511876f8533", + "checksum": "9f4dea3a4b64def36be0119fef4d3f6e62eb6e316bf5749acddb134596faf5e9", "filename": "foo-x86_64.xml", - "checksum_type": 0, + "checksum_type": 2, "btype_id": "BTYPEID:image", "buildroot_id": null, "id": 1004, @@ -102,9 +102,9 @@ "build_id": 137, "archive_id": "ARCHIVE_ID", "type_id": "ARCHIVETYPE", - "checksum": "84547200ef5002292ecdd50c62de518e", + "checksum": "e3ff2b57824a7ee9201786a624c54057de1b279fbcf6782fe25898d657ebd354", "filename": "my-image-7.4.2-2.x86_64.ova", - "checksum_type": 0, + "checksum_type": 2, "btype_id": "BTYPEID:image", "buildroot_id": null, "id": 1005, diff --git a/tests/test_hub/test_get_verify_class.py b/tests/test_hub/test_get_verify_class.py index 4acaf71..918d3e2 100644 --- a/tests/test_hub/test_get_verify_class.py +++ b/tests/test_hub/test_get_verify_class.py @@ -20,3 +20,7 @@ class TestGetVerifyClass(unittest.TestCase): def test_get_verify_class_is_adler32(self): kojihub.get_verify_class('adler32') is adler32_constructor + + def test_get_verify_class_is_sha256(self): + kojihub.get_verify_class('sha256') is hashlib.sha256 + diff --git a/vm/kojikamid.py b/vm/kojikamid.py index 96046a7..95eaf87 100755 --- a/vm/kojikamid.py +++ b/vm/kojikamid.py @@ -608,7 +608,7 @@ def upload_file(server, prefix, path): destpath = os.path.join(prefix, path) fobj = open(destpath, 'r') offset = 0 - sum = hashlib.md5() + sum = hashlib.sha256() while True: data = fobj.read(131072) if not data: @@ -619,8 +619,8 @@ def upload_file(server, prefix, path): sum.update(data) fobj.close() digest = sum.hexdigest() - server.verifyChecksum(path, digest, 'md5') - logger.info('Uploaded %s (%s bytes, md5: %s)', destpath, offset, digest) + server.verifyChecksum(path, digest, 'sha256') + logger.info('Uploaded %s (%s bytes, sha256: %s)', destpath, offset, digest) def get_mgmt_server(): @@ -709,10 +709,10 @@ def stream_logs(server, handler, builds): if contents: size = len(contents) data = base64.b64encode(contents) - digest = hashlib.md5(contents).hexdigest() + digest = hashlib.sha256(contents).hexdigest() del contents try: - server.uploadDirect(relpath, offset, size, digest, data) + server.uploadDirect(relpath, offset, size, ('sha256', digest), data) except Exception: log_local('error uploading %s' % relpath) time.sleep(1) diff --git a/vm/kojivmd b/vm/kojivmd index 6ef6c8f..0ea980f 100755 --- a/vm/kojivmd +++ b/vm/kojivmd @@ -775,14 +775,14 @@ class VMExecTask(BaseTaskHandler): fobj.close() return len(data) - def uploadDirect(self, filepath, offset, size, md5sum, data): + def uploadDirect(self, filepath, offset, size, hash, data): """ Upload contents directly to the server. 
""" remotepath = os.path.dirname(os.path.join(self.getUploadDir(), filepath)) filename = os.path.basename(filepath) self.session.uploadFile(remotepath, filename, size, - md5sum, offset, data) + hash, offset, data) def verifyChecksum(self, path, checksum, algo='sha1'): local_path = os.path.abspath(os.path.join(self.output_dir, path)) diff --git a/www/kojiweb/index.py b/www/kojiweb/index.py index 9291650..bde4674 100644 --- a/www/kojiweb/index.py +++ b/www/kojiweb/index.py @@ -56,7 +56,7 @@ def _setUserCookie(environ, user): raise koji.AuthError('Unable to authenticate, server secret not configured') digest_string = value + options['Secret'].value digest_string = digest_string.encode('utf-8') - shasum = hashlib.sha1(digest_string) + shasum = hashlib.sha256(digest_string) value = "%s:%s" % (shasum.hexdigest(), value) cookies = http.cookies.SimpleCookie() cookies['user'] = value @@ -96,7 +96,7 @@ def _getUserCookie(environ): raise koji.AuthError('Unable to authenticate, server secret not configured') digest_string = value + options['Secret'].value digest_string = digest_string.encode('utf-8') - shasum = hashlib.sha1(digest_string) + shasum = hashlib.sha256(digest_string) if shasum.hexdigest() != sig: authlogger.warning('invalid user cookie: %s:%s', sig, value) return None diff --git a/www/lib/kojiweb/util.py b/www/lib/kojiweb/util.py index 093c9cf..548f23a 100644 --- a/www/lib/kojiweb/util.py +++ b/www/lib/kojiweb/util.py @@ -173,7 +173,7 @@ def _genToken(environ, tstamp=None): tstamp = _truncTime() value = user + str(tstamp) + environ['koji.options']['Secret'].value value = value.encode('utf-8') - return hashlib.sha1(value).hexdigest()[-8:] + return hashlib.sha256(value).hexdigest() def _getValidTokens(environ): From e6b7f8aa03c9b76ed130b9de21859a408e2c0e93 Mon Sep 17 00:00:00 2001 From: Yuming Zhu Date: Jun 23 2020 10:11:46 +0000 Subject: [PATCH 4/5] fix param name for uploadFile --- diff --git a/hub/kojihub.py b/hub/kojihub.py index bc0b4f8..4bee189 100644 --- a/hub/kojihub.py +++ b/hub/kojihub.py @@ -10311,7 +10311,7 @@ class RootExports(object): context.session.assertPerm('admin') return make_task(*args, **opts) - def uploadFile(self, path, name, size, md5sum, offset, data, volume=None): + def uploadFile(self, path, name, size, md5sum, offset, data, volume=None, checksum=None): """upload file to the hub Files can be uploaded in chunks, if so the hash and size describe the @@ -10320,12 +10320,14 @@ class RootExports(object): :param str path: the relative path to upload to :param str name: the name of the file :param int size: size of contents (bytes) - :param str md5sum: md5sum (hex digest) of contents or tuple (hash_type, digest) - md5sum name is misleading, but it is here for backwas compatibility + :param checksum: MD5 hex digest (see md5sum) or a tuple (hash_type, digest) of contents + :type checksum: str or tuple :param str data: base64 encoded file contents :param int offset: The offset indicates where the chunk belongs. The special offset -1 is used to indicate the final chunk. + :param str md5sum: legacy param name of checksum. 
md5sum name is misleading, + but it is here for backwards compatibility :returns: True """ @@ -10335,14 +10337,16 @@ class RootExports(object): # we will accept offset and size as strings to work around xmlrpc limits offset = koji.decode_int(offset) size = koji.decode_int(size) - if isinstance(md5sum, str): + if checksum is None and md5sum is not None: + checksum = md5sum + if isinstance(checksum, str): # this case is for backwards compatibility verify = "md5" - digest = hash - elif hash is None: + digest = checksum + elif checksum is None: verify = None else: - verify, digest = hash + verify, digest = checksum sum_cls = get_verify_class(verify) if offset != -1: if size is not None: diff --git a/vm/kojivmd b/vm/kojivmd index 0ea980f..d3ca2c2 100755 --- a/vm/kojivmd +++ b/vm/kojivmd @@ -775,14 +775,14 @@ class VMExecTask(BaseTaskHandler): fobj.close() return len(data) - def uploadDirect(self, filepath, offset, size, hash, data): + def uploadDirect(self, filepath, offset, size, checksum, data): """ Upload contents directly to the server. """ remotepath = os.path.dirname(os.path.join(self.getUploadDir(), filepath)) filename = os.path.basename(filepath) self.session.uploadFile(remotepath, filename, size, - hash, offset, data) + checksum, offset, data) def verifyChecksum(self, path, checksum, algo='sha1'): local_path = os.path.abspath(os.path.join(self.output_dir, path)) From 8845a70101de614cd8f4fba07b5d5619b6b2da4d Mon Sep 17 00:00:00 2001 From: Yu Ming Zhu Date: Jun 23 2020 10:21:57 +0000 Subject: [PATCH 5/5] fix flake8 for kojihub.py --- diff --git a/hub/kojihub.py b/hub/kojihub.py index 4bee189..3bbcf13 100644 --- a/hub/kojihub.py +++ b/hub/kojihub.py @@ -43,11 +43,11 @@ import tarfile import tempfile import time import traceback +from urllib.parse import parse_qs +import xmlrpc.client import zipfile import rpm -import xmlrpc.client -from urllib.parse import parse_qs import koji import koji.auth
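
For reference, the pattern these patches converge on — a FIPS-tolerant md5 wrapper, a checksum-type dispatcher that also accepts sha256, and chunked digests passed to uploadFile as a (hash_type, digest) tuple — can be sketched on its own. The snippet below is only an illustration under those assumptions: compute_file_checksum and this trimmed-down get_verify_class are stand-ins for the hub helpers, not the actual Koji implementations.

import hashlib


def md5_constructor(*args, **kwargs):
    # Mirror of the koji.util wrapper: when OpenSSL runs in FIPS mode, declare
    # that this md5 use is not for security so the constructor is not blocked.
    fips = getattr(hashlib, '_hashlib', None)
    if fips is not None and hasattr(fips, 'get_fips_mode') and fips.get_fips_mode():
        kwargs['usedforsecurity'] = False
    return hashlib.md5(*args, **kwargs)


def get_verify_class(verify):
    # Trimmed-down stand-in for the hub dispatcher: map a checksum-type name
    # to a hash constructor and reject anything unknown.
    constructors = {
        'md5': md5_constructor,
        'sha1': hashlib.sha1,
        'sha256': hashlib.sha256,
    }
    if verify not in constructors:
        raise ValueError('unsupported verify type: %s' % verify)
    return constructors[verify]


def compute_file_checksum(path, checksum_type='sha256', blocksize=8192):
    # Hypothetical helper showing the chunked-read pattern used throughout the
    # patches; it returns the (hash_type, digest) tuple that the reworked
    # uploadFile call accepts in place of a bare md5 hex digest.
    chksum = get_verify_class(checksum_type)()
    with open(path, 'rb') as fp:
        while True:
            contents = fp.read(blocksize)
            if not contents:
                break
            chksum.update(contents)
    return (checksum_type, chksum.hexdigest())


if __name__ == '__main__':
    # Hash this file itself with both the legacy and the new algorithm.
    print(compute_file_checksum(__file__, 'md5'))
    print(compute_file_checksum(__file__, 'sha256'))

Passing ('sha256', digest) through uploadFile keeps the older bare md5-string form working for existing callers while letting FIPS-enabled builders avoid md5 entirely.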