| |
@@ -24,6 +24,7 @@
|
| |
from __future__ import absolute_import, division
|
| |
|
| |
import copy
|
| |
+ import filecmp
|
| |
import glob
|
| |
import grp
|
| |
import io
|
| |
@@ -80,7 +81,15 @@
|
| |
ServerExit,
|
| |
ServerRestart
|
| |
)
|
| |
- from koji.util import dslice, dslice_ex, isSuccess, parseStatus, to_list, format_shell_cmd
|
| |
+ from koji.util import (
|
| |
+ dslice,
|
| |
+ dslice_ex,
|
| |
+ format_shell_cmd,
|
| |
+ isSuccess,
|
| |
+ joinpath,
|
| |
+ parseStatus,
|
| |
+ to_list,
|
| |
+ )
|
| |
|
| |
try:
|
| |
import requests_gssapi as reqgssapi
|
| |
@@ -5557,6 +5566,86 @@
|
| |
Methods = ['newRepo']
|
| |
_taskWeight = 0.1
|
| |
|
| |
def copy_arch_repo(self, src_repo_id, src_repo_path, repo_id, arch):
    """Copy one arch's repodata from an existing repo and upload it.

    :param int src_repo_id: id of the source repo (used for logging only)
    :param str src_repo_path: filesystem path of the source repo
    :param int repo_id: id of the destination repo (used for logging only)
    :param str arch: architecture whose repodata directory is copied
    :returns: [uploadpath, files] on success, False if the copy fails
    """
    dst_repodata = joinpath(self.workdir, arch, 'repodata')
    src_repodata = joinpath(src_repo_path, arch, 'repodata')
    if not os.path.exists(src_repodata):
        # nothing to copy -- caller falls back to a regular createrepo task
        self.logger.debug('Source repodata %s does not exist' % src_repodata)
        return False
    try:
        # copy repodata
        self.logger.debug('Copying repodata %s to %s' % (src_repodata, dst_repodata))
        # symlinks=True is not needed as they are not part of the arch repodir
        shutil.copytree(src_repodata, dst_repodata)
        uploadpath = self.getUploadDir()
        files = os.listdir(dst_repodata)
        for fn in files:
            self.session.uploadWrapper('%s/%s' % (dst_repodata, fn), uploadpath, fn)
        return [uploadpath, files]
    except Exception as ex:
        self.logger.warning("Copying repo %i to %i failed. %r" % (src_repo_id, repo_id, ex))
        # Try to remove potential leftovers and fail if there is some problem
        koji.util.rmtree(dst_repodata, self.logger)
        return False
|
| |
+
|
| |
def check_repo(self, src_repo_path, dst_repo_path, src_repo, dst_repo, opts):
    """Check if oldrepo is reusable as is and can be directly copied

    :param str src_repo_path: filesystem path of the candidate source repo
    :param str dst_repo_path: filesystem path of the new repo
    :param dict src_repo: source repo info (needs 'tag_id' and 'create_event')
    :param dict dst_repo: destination repo info (needs 'tag_id' and 'create_event')
    :param dict opts: repo options (with_debuginfo/with_src/with_separate_src)
    :returns: True if the source repo looks clonable, False otherwise
    """
    # with_src, debuginfo, pkglist, blocklist, grouplist
    # We're ignoring maven support here. It is handled in repo_init which is called
    # always, so it doesn't affect efficiency of pre-cloning rpm repos.
    if not src_repo_path:
        self.logger.debug("Source repo wasn't found")
        return False
    if not os.path.isdir(src_repo_path):
        self.logger.debug("Source repo doesn't exist %s" % src_repo_path)
        return False
    try:
        repo_json = koji.load_json(joinpath(src_repo_path, 'repo.json'))
        # repos built with different src/debuginfo options are not equivalent
        for key in ('with_debuginfo', 'with_src', 'with_separate_src'):
            if repo_json.get(key, False) != opts.get(key, False):
                return False
    except (IOError, ValueError):
        # missing or unparseable repo.json -- we can't verify the options,
        # so don't clone
        self.logger.debug("Can't open repo.json in %s" % src_repo_path)
        return False

    # compare comps if they exist
    src_comps_path = joinpath(src_repo_path, 'groups/comps.xml')
    dst_comps_path = joinpath(dst_repo_path, 'groups/comps.xml')
    src_exists = os.path.exists(src_comps_path)
    if src_exists != os.path.exists(dst_comps_path):
        self.logger.debug("Comps exists only in one repo")
        return False
    if src_exists and not filecmp.cmp(src_comps_path, dst_comps_path, shallow=False):
        self.logger.debug("Comps differs")
        return False

    # if there is any external repo, don't trust the repodata
    if self.session.getExternalRepoList(src_repo['tag_id'], event=src_repo['create_event']):
        self.logger.debug("Source repo use external repos")
        return False
    if self.session.getExternalRepoList(dst_repo['tag_id'], event=dst_repo['create_event']):
        self.logger.debug("Destination repo use external repos")
        return False

    self.logger.debug('Repo test passed')
    return True
|
| |
+
|
| |
def check_arch_repo(self, src_repo_path, dst_repo_path, arch):
    """More checks based on architecture content

    :param str src_repo_path: filesystem path of the candidate source repo
    :param str dst_repo_path: filesystem path of the new repo
    :param str arch: architecture to check
    :returns: True if the arch repodata can be cloned, False otherwise
    """
    for fname in ('blocklist', 'pkglist'):
        src_file = joinpath(src_repo_path, arch, fname)
        dst_file = joinpath(dst_repo_path, arch, fname)
        # conservative check: the file must exist in both repos, otherwise
        # (missing in either or both) we don't consider the repo clonable
        if not os.path.exists(src_file) or not os.path.exists(dst_file):
            self.logger.debug("%s doesn't exist in one of the repos" % fname)
            return False
        # content must be same
        if not filecmp.cmp(src_file, dst_file, shallow=False):
            self.logger.debug('%s differs' % fname)
            return False
    self.logger.debug('Arch repo test passed %s' % arch)
    return True
|
| |
+
|
| |
def handler(self, tag, event=None, src=False, debuginfo=False, separate_src=False):
|
| |
tinfo = self.session.getTag(tag, strict=True, event=event)
|
| |
kwargs = {}
|
| |
@@ -5585,6 +5674,10 @@
|
| |
else:
|
| |
oldrepo_state = koji.REPO_READY
|
| |
oldrepo = self.session.getRepo(tinfo['id'], state=oldrepo_state)
|
| |
+ oldrepo_path = None
|
| |
+ if oldrepo:
|
| |
+ oldrepo_path = koji.pathinfo.repo(oldrepo['id'], tinfo['name'])
|
| |
+ oldrepo['tag_id'] = tinfo['id']
|
| |
# If there is no old repo, try to find first usable repo in
|
| |
# inheritance chain and use it as a source. oldrepo is not used if
|
| |
# createrepo_update is not set, so don't waste call in such case.
|
| |
@@ -5595,28 +5688,49 @@
|
| |
for tag in sorted(tags, key=lambda x: x['currdepth']):
|
| |
oldrepo = self.session.getRepo(tag['parent_id'], state=oldrepo_state)
|
| |
if oldrepo:
|
| |
+ parenttag = self.session.getTag(tag['parent_id'])
|
| |
+ oldrepo_path = koji.pathinfo.repo(oldrepo['id'], parenttag['name'])
|
| |
+ oldrepo['tag_id'] = parenttag['id']
|
| |
break
|
| |
+ newrepo_path = koji.pathinfo.repo(repo_id, tinfo['name'])
|
| |
+ newrepo = {'tag_id': tinfo['id'], 'create_event': event_id}
|
| |
+ if self.options.copy_old_repodata:
|
| |
+ possibly_clonable = self.check_repo(oldrepo_path, newrepo_path,
|
| |
+ oldrepo, newrepo, kwargs)
|
| |
+ else:
|
| |
+ possibly_clonable = False
|
| |
subtasks = {}
|
| |
+ data = {}
|
| |
+ cloned_archs = []
|
| |
for arch in arches:
|
| |
+ if possibly_clonable and self.check_arch_repo(oldrepo_path, newrepo_path, arch):
|
| |
+ result = self.copy_arch_repo(oldrepo['id'], oldrepo_path, repo_id, arch)
|
| |
+ if result:
|
| |
+ data[arch] = result
|
| |
+ cloned_archs.append(arch)
|
| |
+ continue
|
| |
+ # if we can't copy old repo directly, trigger normal createrepo
|
| |
arglist = [repo_id, arch, oldrepo]
|
| |
subtasks[arch] = self.session.host.subtask(method='createrepo',
|
| |
arglist=arglist,
|
| |
label=arch,
|
| |
parent=self.id,
|
| |
arch='noarch')
|
| |
-
|
| |
# gather subtask results
|
| |
- data = {}
|
| |
if subtasks:
|
| |
results = self.wait(to_list(subtasks.values()), all=True, failany=True)
|
| |
for (arch, task_id) in six.iteritems(subtasks):
|
| |
data[arch] = results[task_id]
|
| |
- self.logger.debug("DEBUG: %r : %r " % (arch, data[arch],))
|
| |
|
| |
# finalize
|
| |
kwargs = {}
|
| |
if event is not None:
|
| |
kwargs['expire'] = True
|
| |
+ if cloned_archs:
|
| |
+ kwargs['repo_json_updates'] = {
|
| |
+ 'cloned_from_repo_id': oldrepo['id'],
|
| |
+ 'cloned_archs': cloned_archs,
|
| |
+ }
|
| |
self.session.host.repoDone(repo_id, data, **kwargs)
|
| |
return repo_id, event_id
|
| |
|
| |
@@ -6477,6 +6591,7 @@
|
| |
'createrepo_skip_stat': True,
|
| |
'createrepo_update': True,
|
| |
'distrepo_skip_stat': False,
|
| |
+ 'copy_old_repodata': False,
|
| |
'mock_bootstrap_image': False,
|
| |
'pkgurl': None,
|
| |
'allowed_scms': '',
|
| |
@@ -6513,7 +6628,7 @@
|
| |
'build_arch_can_fail', 'no_ssl_verify', 'log_timestamps',
|
| |
'allow_noverifyssl', 'allowed_scms_use_config',
|
| |
'allowed_scms_use_policy', 'allow_password_in_scm_url',
|
| |
- 'distrepo_skip_stat']:
|
| |
+ 'distrepo_skip_stat', 'copy_old_repodata']:
|
| |
defaults[name] = config.getboolean('kojid', name)
|
| |
elif name in ['plugin', 'plugins']:
|
| |
defaults['plugin'] = value.split()
|
| |
Related: https://pagure.io/koji/issue/3808