From 31ddbe51a5c4957250f095d5e4573c886773f6a2 Mon Sep 17 00:00:00 2001 From: Jan Kaluza Date: Apr 03 2018 13:58:57 +0000 Subject: Add generate_expanded_mmds, which returns a list of MMDs that can be submitted as MSE builds, and submit them. --- diff --git a/module_build_service/models.py b/module_build_service/models.py index 93be5ca..e7c35a3 100644 --- a/module_build_service/models.py +++ b/module_build_service/models.py @@ -249,13 +249,16 @@ class ModuleBuild(MBSBase): Returns list of all latest ModuleBuilds in "ready" state for all streams for given module `name`. """ + # Prepare the subquery to find out all unique name:stream records. subq = session.query( - ModuleBuild.id, - func.max(ModuleBuild.version.cast(db.Integer)) - ).group_by(ModuleBuild.name, ModuleBuild.stream).filter_by( + func.max(ModuleBuild.id).label("maxid"), + func.max(sqlalchemy.cast(ModuleBuild.version, db.BigInteger)) + ).group_by(ModuleBuild.stream).filter_by( name=name, state=BUILD_STATES["ready"]).subquery('t2') + + # Use the subquery to actually return all the columns for its results. query = session.query(ModuleBuild).join( - subq, and_(ModuleBuild.id == subq.c.id)) + subq, and_(ModuleBuild.id == subq.c.maxid)) return query.all() @staticmethod @@ -263,18 +266,30 @@ class ModuleBuild(MBSBase): """ Returns the latest builds in "ready" state for given name:stream. """ + # Prepare the subquery to find the latest version of the given name:stream. subq = session.query( - ModuleBuild.version, - func.max(ModuleBuild.version.cast(db.Integer)) - ).group_by(ModuleBuild.name, ModuleBuild.stream).filter_by( - name=name, state=BUILD_STATES["ready"], stream=stream).subquery('t2') + func.max(sqlalchemy.cast(ModuleBuild.version, db.BigInteger)).label("maxversion") + ).filter_by(name=name, state=BUILD_STATES["ready"], stream=stream).subquery('t2') + + # Use the subquery to actually return all the columns for its results. query = session.query(ModuleBuild).join( subq, and_( ModuleBuild.name == name, ModuleBuild.stream == stream, - ModuleBuild.version == subq.c.version)) + sqlalchemy.cast(ModuleBuild.version, db.BigInteger) == subq.c.maxversion)) return query.all() + @staticmethod + def get_build_from_nsvc(session, name, stream, version, context): + # TODO: Rewrite this to use self.context when we add it. + builds = session.query(ModuleBuild).filter_by( + name=name, stream=stream, version=version).all() + for build in builds: + log.debug("Checking context of %s:%s:%s: %s", build.name, build.stream, build.version, build.context) + if build.context == context: + return build + return None + def mmd(self): try: mmd = Modulemd.Module().new_from_string(self.modulemd) @@ -327,27 +342,26 @@ class ModuleBuild(MBSBase): # We have to use keys because GLib.Variant doesn't support `in` directly. 
if property_name not in mbs_xmd.keys(): raise ValueError('The module\'s modulemd hasn\'t been formatted by MBS') - mmd_property = getattr(mmd.get_dependencies()[0], 'get_{0}'.format(property_name))() - if set(mbs_xmd[property_name].keys()) != set(mmd_property.keys()): - raise ValueError('The dependencies.{0} section of the modulemd doesn\'t match ' - 'what is in xmd'.format(property_name)) mmd_formatted_property = { dep: info['ref'] for dep, info in mbs_xmd[property_name].items()} property_json = json.dumps(OrderedDict(sorted(mmd_formatted_property.items()))) rv.append(hashlib.sha1(property_json.encode('utf-8')).hexdigest()) return tuple(rv) - @property - def context(self): - if self.build_context and self.runtime_context: - combined_hashes = '{0}:{1}'.format( - self.build_context, self.runtime_context).encode('utf-8') + @staticmethod + def context_from_contexts(build_context, runtime_context): + if build_context and runtime_context: + combined_hashes = '{0}:{1}'.format(build_context, runtime_context).encode('utf-8') return hashlib.sha1(combined_hashes).hexdigest()[:8] else: # We can't compute the context because the necessary data isn't there, so return a # default value return '00000000' + @property + def context(self): + return ModuleBuild.context_from_contexts(self.build_context, self.runtime_context) + @classmethod def create(cls, session, conf, name, stream, version, modulemd, scmurl, username, copr_owner=None, copr_project=None, rebuild_strategy=None, publish_msg=True): diff --git a/module_build_service/resolver/DBResolver.py b/module_build_service/resolver/DBResolver.py index c9993ea..d30cbe5 100644 --- a/module_build_service/resolver/DBResolver.py +++ b/module_build_service/resolver/DBResolver.py @@ -98,7 +98,9 @@ class DBResolver(GenericResolver): if local_modules: dep = local_modules[0] else: - dep = models.ModuleBuild.get_last_build_in_stream(session, name, stream) + dep = models.ModuleBuild.get_last_builds_in_stream(session, name, stream) + if dep: + dep = dep[0] if dep: modules = self._get_recursively_required_modules(dep, session, modules, strict) elif strict: @@ -227,6 +229,12 @@ class DBResolver(GenericResolver): new_requires = {} with models.make_session(self.config) as session: for module_name, module_stream in requires.items(): + if ":" in module_stream: + module_stream, module_version, module_context = module_stream.split(":") + else: + module_version = None + module_context = None + local_modules = models.ModuleBuild.local_modules( session, module_name, module_stream) if local_modules: @@ -235,6 +243,7 @@ class DBResolver(GenericResolver): 'ref': None, 'stream': local_build.stream, 'version': local_build.version, + 'context': local_build.context, # No need to set filtered_rpms for local builds, because MBS # filters the RPMs automatically when the module build is # done. 
@@ -242,11 +251,21 @@ class DBResolver(GenericResolver): } continue - build = models.ModuleBuild.get_last_build_in_stream( - session, module_name, module_stream) - if not build: - raise UnprocessableEntity('The module {}:{} was not found'.format( - module_name, module_stream)) + if module_version is None or module_context is None: + build = models.ModuleBuild.get_last_builds_in_stream( + session, module_name, module_stream) + if build: + build = build[0] + if not build: + raise UnprocessableEntity('The module {}:{} was not found'.format( + module_name, module_stream)) + else: + build = models.ModuleBuild.get_build_from_nsvc( + session, module_name, module_stream, module_version, module_context) + if not build: + raise UnprocessableEntity('The module {}:{}:{}:{} was not found'.format( + module_name, module_stream, module_version, module_context)) + commit_hash = None filtered_rpms = [] mmd = build.mmd() @@ -260,15 +279,16 @@ class DBResolver(GenericResolver): # Find out the particular NVR of filtered packages rpm_filter = mmd.get_rpm_filter() - if rpm_filter: + if rpm_filter and rpm_filter.get(): for rpm in build.component_builds: - if rpm.package in rpm_filter: + if rpm.package in rpm_filter.get(): filtered_rpms.append(rpm.nvr) new_requires[module_name] = { 'ref': commit_hash, 'stream': module_stream, 'version': build.version, + 'context': build.context, 'filtered_rpms': filtered_rpms, } diff --git a/module_build_service/scheduler/handlers/modules.py b/module_build_service/scheduler/handlers/modules.py index 88ddc32..5fb5f22 100644 --- a/module_build_service/scheduler/handlers/modules.py +++ b/module_build_service/scheduler/handlers/modules.py @@ -145,8 +145,6 @@ def init(config, session, msg): try: mmd = build.mmd() record_component_builds(mmd, build, session=session) - build.build_context, build.runtime_context = build.contexts_from_mmd(mmd.dumps()) - mmd.set_context(build.context) build.modulemd = mmd.dumps() build.transition(conf, models.BUILD_STATES["wait"]) # Catch custom exceptions that we can expose to the user diff --git a/module_build_service/utils.py b/module_build_service/utils.py index 2787c55..ce24536 100644 --- a/module_build_service/utils.py +++ b/module_build_service/utils.py @@ -50,6 +50,7 @@ import module_build_service.messaging from multiprocessing.dummy import Pool as ThreadPool import module_build_service.resolver from module_build_service import glib +from module_build_service.mmd_resolver import MMDResolver import concurrent.futures @@ -625,22 +626,6 @@ def load_mmd(yaml, is_file=False): log.error(error) raise UnprocessableEntity(error) - # MBS doesn't support module stream expansion yet but supports the v2 modulemd format, - # so if module stream expansion syntax is used, fail the submission - # TODO: Once module stream expansion is supported, the get_dependencies() function should - # be squashed to a single list on resulting modulemds - error_msg = 'Module stream expansion is not yet supported in MBS' - deps_list = mmd.get_dependencies() - if len(deps_list) > 1: - raise UnprocessableEntity(error_msg) - elif len(deps_list) == 1: - for dep_type in ['requires', 'buildrequires']: - deps = getattr(deps_list[0], 'get_{0}'.format(dep_type))() - for streams in deps.values(): - if len(streams.get()) != 1: - raise UnprocessableEntity(error_msg) - elif streams.get()[0].startswith('-'): - raise UnprocessableEntity(error) return mmd @@ -770,7 +755,12 @@ def format_mmd(mmd, scmurl, session=None): session = db.session xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs'] = 
{'scmurl': scmurl or '', 'commit': ''} + if 'mbs' not in xmd: + xmd['mbs'] = {} + if 'scmurl' not in xmd['mbs']: + xmd['mbs']['scmurl'] = scmurl or '' + if 'commit' not in xmd['mbs']: + xmd['mbs']['commit'] = '' local_modules = models.ModuleBuild.local_modules(session) local_modules = {m.name + "-" + m.stream: m for m in local_modules} @@ -796,10 +786,12 @@ def format_mmd(mmd, scmurl, session=None): # Resolve buildrequires and requires # Reformat the input for resolve_requires to match the old modulemd format dep_obj = mmd.get_dependencies()[0] - br_dict = {br: br_list.get()[0] for br, br_list in dep_obj.get_buildrequires().items()} - req_dict = {req: req_list.get()[0] for req, req_list in dep_obj.get_requires().items()} - xmd['mbs']['buildrequires'] = resolver.resolve_requires(br_dict) - xmd['mbs']['requires'] = resolver.resolve_requires(req_dict) + if 'buildrequires' not in xmd['mbs']: + br_dict = {br: br_list.get()[0] for br, br_list in dep_obj.get_buildrequires().items()} + xmd['mbs']['buildrequires'] = resolver.resolve_requires(br_dict) + if 'requires' not in xmd['mbs']: + req_dict = {req: req_list.get()[0] for req, req_list in dep_obj.get_requires().items()} + xmd['mbs']['requires'] = resolver.resolve_requires(req_dict) if mmd.get_rpm_components() or mmd.get_module_components(): if 'rpms' not in xmd['mbs']: @@ -992,61 +984,197 @@ def submit_module_build_from_scm(username, url, branch, allow_local_url=False, return submit_module_build(username, url, mmd, scm, optional_params) +def generate_expanded_mmds(session, mmd): + """ + Returns a list of MMDs with buildrequires and requires set according + to module stream expansion rules. These module metadata can be built + directly by MBS. + """ + if not session: + session = db.session + + # Create a local copy of mmd, because we will have to expand its dependencies, + # which would change the module. + # TODO: Use the copy method once it's in a released libmodulemd: + # https://github.com/fedora-modularity/libmodulemd/pull/20 + current_mmd = Modulemd.Module.new_from_string(mmd.dumps()) + + # MMDResolver expects the input MMD to have no context. + current_mmd.set_context(None) + + # Expand the MSE streams. This mainly handles the '-' prefix in MSE streams. + expand_mse_streams(session, current_mmd) + + # Get the list of all MMDs which this module can possibly be built against + # and add them to MMDResolver. + mmd_resolver = MMDResolver() + mmds_for_resolving = get_mmds_required_by_module_recursively( + session, current_mmd) + for m in mmds_for_resolving: + mmd_resolver.add_modules(m) + + # Log the NSVCs we have added to mmd_resolver. + nsvcs_to_solve = [ + ":".join([m.get_name(), m.get_stream(), str(m.get_version()), str(m.get_context())]) + for m in mmds_for_resolving] + log.info("Starting resolving with the following input modules: %r", nsvcs_to_solve) + + # Resolve the dependencies between modules and get the list of all valid + # combinations in which we can build this module. + requires_combinations = mmd_resolver.solve(current_mmd) + log.info("Resolving done, possible requires: %r", requires_combinations) + + # This is where we are going to store the generated MMDs. + mmds = [] + for requires in requires_combinations: + # Each generated MMD must be a new Module object... 
+ # TODO: Use the copy method once it's in a released libmodulemd: + # https://github.com/fedora-modularity/libmodulemd/pull/20 + mmd_copy = Modulemd.Module.new_from_string(mmd.dumps()) + xmd = glib.from_variant_dict(mmd_copy.get_xmd()) + + # `requires` contains the NSVC representing the input mmd. + # The 'context' of this NSVC defines the index of the buildrequires/requires + # pair in mmd.get_dependencies(). + dependencies_id = None + + # We don't want to depend on ourselves, so store the NSVC of the current_mmd + # to be able to ignore it later. + self_nsvc = None + + # Dict to store name:stream pairs from the NSVCs, so we are able to access + # them easily later. + req_name_stream = {} + + # Get the values for the dependencies_id, self_nsvc and req_name_stream variables. + for nsvc in requires: + req_name, req_stream, _ = nsvc.split(":", 2) + if req_name == current_mmd.get_name() and req_stream == current_mmd.get_stream(): + dependencies_id = int(nsvc.split(":")[3]) + self_nsvc = nsvc + continue + req_name_stream[req_name] = req_stream + if dependencies_id is None or self_nsvc is None: + raise RuntimeError( + "%s:%s not found in requires %r" % (current_mmd.get_name(), current_mmd.get_stream(), requires)) + + # The name:[streams, ...] pairs do not have to be the same in both + # buildrequires/requires. In case they are the same, we replace the streams + # in the requires section with the single stream against which we will build this MMD. + # In case they are not the same, we have to keep the streams as they are in the + # requires section. We always replace the stream(s) of a build-requirement with the one + # we will build this MMD against. + new_dep = Modulemd.Dependencies() + dep = mmd_copy.get_dependencies()[dependencies_id] + dep_requires = dep.get_requires() + dep_buildrequires = dep.get_buildrequires() + for req_name, req_streams in dep_requires.items(): + if (req_name not in dep_buildrequires or + set(req_streams.get()) != set(dep_buildrequires[req_name].get())): + # The streams in the runtime section are not the same as in the buildtime section, + # so just copy this runtime requirement to new_dep. + new_dep.add_requires(req_name, req_streams.get()) + else: + # This runtime requirement has the same streams in both the runtime and buildtime + # requires sections, so replace the streams in both sections with the one we + # actually used in this resolved variant. + new_dep.add_requires(req_name, [req_name_stream[req_name]]) + new_dep.add_buildrequires(req_name, [req_name_stream[req_name]]) + mmd_copy.set_dependencies((new_dep, )) + + # The Modulemd.Dependencies() stores only streams, but to really build this + # module, we need the NSVCs of the buildrequires. We will get them using + # module_build_service.resolver.GenericResolver.resolve_requires, so prepare + # a dict in {N: SVC, ...} format as an input for this method. + br_dict = {} + for nsvc in requires: + if nsvc == self_nsvc: + continue + req_name, req_stream, req_version, req_context, req_arch = nsvc.split(":") + br_dict[req_name] = ":".join([req_stream, req_version, req_context]) + + # The same for the runtime requires, which we need to compute the runtime context. + r_dict = {req: req_list.get()[0] for req, req_list in new_dep.get_requires().items()} + + # Resolve the requires/buildrequires and store the result in XMD. 
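+ # For example (the values are only illustrative, matching the test data in this patch), + # br_dict can look like {"platform": "f28:3:00000000"}, and resolve_requires() then + # stores the ref, stream, version, context and filtered_rpms of every buildrequire + # under xmd['mbs']['buildrequires'].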
+ if 'mbs' not in xmd: + xmd['mbs'] = {} + resolver = module_build_service.resolver.GenericResolver.create(conf) + xmd['mbs']['buildrequires'] = resolver.resolve_requires(br_dict) + xmd['mbs']['requires'] = resolver.resolve_requires(r_dict) + + mmd_copy.set_xmd(glib.dict_values(xmd)) + + # Now we have all the info to actually compute context of this module. + build_context, runtime_context = models.ModuleBuild.contexts_from_mmd(mmd_copy.dumps()) + context = models.ModuleBuild.context_from_contexts(build_context, runtime_context) + mmd_copy.set_context(context) + + mmds.append(mmd_copy) + + return mmds + + def submit_module_build(username, url, mmd, scm, optional_params=None): import koji # Placed here to avoid py2/py3 conflicts... - # Import it here, because SCM uses utils methods - # and fails to import them because of dep-chain. validate_mmd(mmd) - module = models.ModuleBuild.query.filter_by( - name=mmd.get_name(), stream=mmd.get_stream(), version=str(mmd.get_version())).first() - if module: - log.debug('Checking whether module build already exist.') - if module.state != models.BUILD_STATES['failed']: - err_msg = ('Module (state=%s) already exists. Only a new build or resubmission of ' - 'a failed build is allowed.' % module.state) - log.error(err_msg) - raise Conflict(err_msg) - if optional_params: - rebuild_strategy = optional_params.get('rebuild_strategy') - if rebuild_strategy and module.rebuild_strategy != rebuild_strategy: - raise ValidationError('You cannot change the module\'s "rebuild_strategy" when ' - 'resuming a module build') - log.debug('Resuming existing module build %r' % module) - # Reset all component builds that didn't complete - for component in module.component_builds: - if component.state and component.state != koji.BUILD_STATES['COMPLETE']: - component.state = None - component.state_reason = None - db.session.add(component) - module.username = username - prev_state = module.previous_non_failed_state - if prev_state == models.BUILD_STATES['init']: - transition_to = models.BUILD_STATES['init'] + mmds = generate_expanded_mmds(db.session, mmd) + + for mmd in mmds: + module = models.ModuleBuild.get_build_from_nsvc( + db.session, mmd.get_name(), mmd.get_stream(), str(mmd.get_version()), + mmd.get_context()) + if module: + log.debug('Checking whether module build already exist.') + if module.state != models.BUILD_STATES['failed']: + err_msg = ('Module (state=%s) already exists. Only a new build or resubmission of ' + 'a failed build is allowed.' 
% module.state) + log.error(err_msg) + raise Conflict(err_msg) + if optional_params: + rebuild_strategy = optional_params.get('rebuild_strategy') + if rebuild_strategy and module.rebuild_strategy != rebuild_strategy: + raise ValidationError('You cannot change the module\'s "rebuild_strategy" when ' + 'resuming a module build') + log.debug('Resuming existing module build %r' % module) + # Reset all component builds that didn't complete + for component in module.component_builds: + if component.state and component.state != koji.BUILD_STATES['COMPLETE']: + component.state = None + component.state_reason = None + db.session.add(component) + module.username = username + prev_state = module.previous_non_failed_state + if prev_state == models.BUILD_STATES['init']: + transition_to = models.BUILD_STATES['init'] + else: + transition_to = models.BUILD_STATES['wait'] + module.batch = 0 + module.transition(conf, transition_to, "Resubmitted by %s" % username) + log.info("Resumed existing module build in previous state %s" + % module.state) else: - transition_to = models.BUILD_STATES['wait'] - module.batch = 0 - module.transition(conf, transition_to, "Resubmitted by %s" % username) - log.info("Resumed existing module build in previous state %s" - % module.state) - else: - log.debug('Creating new module build') - module = models.ModuleBuild.create( - db.session, - conf, - name=mmd.get_name(), - stream=mmd.get_stream(), - version=str(mmd.get_version()), - modulemd=mmd.dumps(), - scmurl=url, - username=username, - **(optional_params or {}) - ) - - db.session.add(module) - db.session.commit() - log.info("%s submitted build of %s, stream=%s, version=%s", username, - mmd.get_name(), mmd.get_stream(), mmd.get_version()) + log.debug('Creating new module build') + module = models.ModuleBuild.create( + db.session, + conf, + name=mmd.get_name(), + stream=mmd.get_stream(), + version=str(mmd.get_version()), + modulemd=mmd.dumps(), + scmurl=url, + username=username, + **(optional_params or {}) + ) + module.build_context, module.runtime_context = \ + module.contexts_from_mmd(module.modulemd) + + + db.session.add(module) + db.session.commit() + log.info("%s submitted build of %s, stream=%s, version=%s, context=%s", username, + mmd.get_name(), mmd.get_stream(), mmd.get_version(), mmd.get_context()) return module @@ -1404,9 +1532,58 @@ def get_reusable_component(session, module, component_name, return reusable_component +def _expand_mse_streams(session, name, streams): + """ + Helper method for `expand_mse_stream()` expanding single name:[streams]. + Returns list of expanded streams. + """ + # Stream can be prefixed with '-' sign to define that this stream should + # not appear in a resulting list of streams. There can be two situations: + # a) all streams have '-' prefix. In this case, we treat list of streams + # as blacklist and we find all the valid streams and just remove those with + # '-' prefix. + # b) there is at least one stream without '-' prefix. In this case, we can + # ignore all the streams with '-' prefix and just add those without + # '-' prefix to the list of valid streams. 
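+ # For example (the streams stored in the DB are illustrative): if the "ready" builds + # provide streams "1", "2" and "3", then ["-2"] expands to ["1", "3"] (blacklist), + # ["1", "-2"] expands to ["1"] (the '-2' entry is simply ignored), and an empty list + # expands to ["1", "2", "3"].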
+ streams_is_blacklist = all(stream.startswith("-") for stream in streams.get()) + if streams_is_blacklist or len(streams.get()) == 0: + builds = models.ModuleBuild.get_last_build_in_all_streams( + session, name) + expanded_streams = [build.stream for build in builds] + else: + expanded_streams = [] + for stream in streams.get(): + if stream.startswith("-"): + if streams_is_blacklist and stream[1:] in expanded_streams: + expanded_streams.remove(stream[1:]) + else: + expanded_streams.append(stream) + return expanded_streams + + +def expand_mse_streams(session, mmd): + """ + Expands streams in both buildrequires/requires sections of MMD. + """ + for deps in mmd.get_dependencies(): + expanded = {} + for name, streams in deps.get_requires().items(): + streams_set = Modulemd.SimpleSet() + streams_set.set(_expand_mse_streams(session, name, streams)) + expanded[name] = streams_set + deps.set_requires(expanded) + + expanded = {} + for name, streams in deps.get_buildrequires().items(): + streams_set = Modulemd.SimpleSet() + streams_set.set(_expand_mse_streams(session, name, streams)) + expanded[name] = streams_set + deps.set_buildrequires(expanded) + + def _get_mmds_from_requires(session, requires, mmds, recursive=False): """ - Helper method for get_modules_build_required_by_module_recursively returning + Helper method for get_mmds_required_by_module_recursively returning the list of module metadata objects defined by `requires` dict. :param session: SQLAlchemy DB session. @@ -1419,57 +1596,41 @@ def _get_mmds_from_requires(session, requires, mmds, recursive=False): # To be able to call itself recursively, we need to store list of mmds # we have added to global mmds list in this particular call. added_mmds = {} - for name, streams in requires: - # Stream can be prefixed with '-' sign to define that this stream should - # not appear in a resulting list of streams. There can be two situations: - # a) all streams have '-' prefix. In this case, we treat list of streams - # as blacklist and we find all the valid streams and just remove those with - # '-' prefix. - # b) there is at least one stream without '-' prefix. In this case, we can - # ignore all the streams with '-' prefix and just add those without - # '-' prefix to the list of valid streams. - streams_is_blacklist = all(stream.startswith("-") for stream in streams.get()) - if streams_is_blacklist or len(streams.get()) == 0: - builds = models.ModuleBuild.get_last_build_in_all_streams( - session, name) - valid_streams = [build.stream for build in builds] - else: - valid_streams = [] - for stream in streams.get(): - if stream.startswith("-"): - if streams_is_blacklist and stream[1:] in valid_streams: - valid_streams.remove(stream[1:]) - else: - valid_streams.append(stream) - + for name, streams in requires.items(): # For each valid stream, find the last build in a stream and also all # its contexts and add mmds of these builds to `mmds` and `added_mmds`. # Of course only do that if we have not done that already in some # previous call of this method. - for stream in valid_streams: + for stream in streams.get(): ns = "%s:%s" % (name, stream) if ns in mmds: continue builds = models.ModuleBuild.get_last_builds_in_stream( session, name, stream) - mmds[ns] = [build.mmd() for build in builds] - added_mmds[ns] = mmds[ns] + if not builds: + raise ValueError("Cannot find any module build for %s:%s " + "in MBS database" % (name, stream)) + else: + mmds[ns] = [build.mmd() for build in builds] + added_mmds[ns] = mmds[ns] # Get the requires recursively. 
if recursive: for mmd_list in added_mmds.values(): for mmd in mmd_list: for deps in mmd.get_dependencies(): - mmds = _get_mmds_from_requires(session, deps.get_requires().items(), mmds, True) + mmds = _get_mmds_from_requires(session, deps.get_requires(), mmds, True) return mmds -def get_modules_build_required_by_module_recursively(session, mmd): +def get_mmds_required_by_module_recursively(session, mmd): """ Returns the list of Module metadata objects of all modules required while - building the module defined by `mmd` module metadata. + building the module defined by `mmd` module metadata. This presumes the + module metadata streams are expanded using `expand_mse_streams(...)` + method. This method finds out latest versions of all the build-requires of the `mmd` module and then also all contexts of these latest versions. @@ -1490,13 +1651,13 @@ def get_modules_build_required_by_module_recursively(session, mmd): # At first get all the buildrequires of the module of interest. for deps in mmd.get_dependencies(): - mmds = _get_mmds_from_requires(session, deps.get_buildrequires().items(), mmds) + mmds = _get_mmds_from_requires(session, deps.get_buildrequires(), mmds) # Now get the requires of buildrequires recursively. for mmd_key in list(mmds.keys()): for mmd in mmds[mmd_key]: for deps in mmd.get_dependencies(): - mmds = _get_mmds_from_requires(session, deps.get_requires().items(), mmds, True) + mmds = _get_mmds_from_requires(session, deps.get_requires(), mmds, True) # Make single list from dict of lists. res = [] diff --git a/tests/__init__.py b/tests/__init__.py index 7c1be49..f43c101 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -87,25 +87,26 @@ def patch_zeromq_time_sleep(): patch_zeromq_time_sleep() -def clean_database(): +def clean_database(add_platform_module=True): db.session.commit() db.drop_all() db.create_all() - platform = ModuleBuild() - platform.name = 'platform' - platform.stream = 'f28' - platform.version = '3' - platform.koji_tag = 'module-f28-build' - platform.state = BUILD_STATES['ready'] - with open(os.path.join(base_dir, 'staged_data', 'platform.yaml')) as f: - platform.modulemd = f.read() - platform.rebuild_strategy = 'all' - platform.owner = 'releng' - platform.time_submitted = datetime.utcnow() - platform.time_modified = datetime.utcnow() - platform.time_completed = datetime.utcnow() - db.session.add(platform) - db.session.commit() + if add_platform_module: + platform = ModuleBuild() + platform.name = 'platform' + platform.stream = 'f28' + platform.version = '3' + platform.koji_tag = 'module-f28-build' + platform.state = BUILD_STATES['ready'] + with open(os.path.join(base_dir, 'staged_data', 'platform.yaml')) as f: + platform.modulemd = f.read() + platform.rebuild_strategy = 'all' + platform.owner = 'releng' + platform.time_submitted = datetime.utcnow() + platform.time_modified = datetime.utcnow() + platform.time_completed = datetime.utcnow() + db.session.add(platform) + db.session.commit() def init_data(data_size=10, contexts=False): diff --git a/tests/staged_data/platform.yaml b/tests/staged_data/platform.yaml index 1e46842..6125339 100644 --- a/tests/staged_data/platform.yaml +++ b/tests/staged_data/platform.yaml @@ -16,6 +16,7 @@ data: stream: f28 summary: Fedora 28 traditional base version: 3 + context: 00000000 xmd: mbs: buildrequires: {} diff --git a/tests/test_build/test_build.py b/tests/test_build/test_build.py index 517a92e..c6686c1 100644 --- a/tests/test_build/test_build.py +++ b/tests/test_build/test_build.py @@ -27,6 +27,7 @@ from os.path 
import dirname from shutil import copyfile from datetime import datetime, timedelta from random import randint +import hashlib import module_build_service.messaging import module_build_service.scheduler.handlers.repos @@ -278,6 +279,13 @@ class FakeModuleBuilder(GenericBuilder): return msgs +original_context_from_contexts = models.ModuleBuild.context_from_contexts +def mocked_context_from_contexts(build_context, runtime_context): + if build_context == "return_runtime_context": + return runtime_context + return original_context_from_contexts(build_context, runtime_context) + + def cleanup_moksha(): # Necessary to restart the twisted reactor for the next test. import sys @@ -853,11 +861,15 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') - def test_submit_build_resume(self, mocked_scm, mocked_get_user, conf_system, dbg): + @patch("module_build_service.models.ModuleBuild.context_from_contexts") + def test_submit_build_resume(self, context_from_contexts, mocked_scm, mocked_get_user, + conf_system, dbg): """ Tests that resuming the build works even when previous batches are already built. """ + context_from_contexts.side_effect = mocked_context_from_contexts + now = datetime.utcnow() submitted_time = now - timedelta(minutes=3) # Create a module in the failed state @@ -865,8 +877,8 @@ class TestBuild: build_one.name = 'testmodule' build_one.stream = 'master' build_one.version = 20180205135154 - build_one.build_context = 'ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0' - build_one.runtime_context = 'ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0' + build_one.build_context = 'return_runtime_context' + build_one.runtime_context = 'c2c572ec' build_one.state = models.BUILD_STATES['failed'] current_dir = os.path.dirname(__file__) formatted_testmodule_yml_path = os.path.join( @@ -974,12 +986,16 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') + @patch("module_build_service.models.ModuleBuild.context_from_contexts") def test_submit_build_resume_recover_orphaned_macros( - self, mocked_scm, mocked_get_user, conf_system, dbg): + self, context_from_contexts, mocked_scm, mocked_get_user, + conf_system, dbg): """ Tests that resuming the build works when module-build-macros is orphaned but marked as failed in the database """ + context_from_contexts.side_effect = mocked_context_from_contexts + FakeModuleBuilder.INSTANT_COMPLETE = True now = datetime.utcnow() submitted_time = now - timedelta(minutes=3) @@ -988,8 +1004,8 @@ class TestBuild: build_one.name = 'testmodule' build_one.stream = 'master' build_one.version = 20180205135154 - build_one.build_context = 'ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0' - build_one.runtime_context = 'ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0' + build_one.build_context = 'return_runtime_context' + build_one.runtime_context = 'c2c572ec' build_one.state = models.BUILD_STATES['failed'] current_dir = os.path.dirname(__file__) formatted_testmodule_yml_path = os.path.join( diff --git a/tests/test_utils/test_utils.py b/tests/test_utils/test_utils.py index 3e0ff20..a9387c8 100644 --- a/tests/test_utils/test_utils.py +++ b/tests/test_utils/test_utils.py @@ -286,12 +286,14 @@ class TestUtils: 'ref': 'virtual', 'stream': 'f28', 'version': '3', + 'context': '00000000', 'filtered_rpms': []}}, 'requires': { 'platform': { 'version': '3', 'ref': 'virtual', 'stream': 'f28', + 'context': '00000000', 'filtered_rpms': []}}, 'rpms': { 'perl-List-Compare': {'ref': 
'fbed359411a1baa08d4a88e0d12d426fbf8f602c'}, diff --git a/tests/test_utils/test_utils_mse.py b/tests/test_utils/test_utils_mse.py index da96764..fc1cb46 100644 --- a/tests/test_utils/test_utils_mse.py +++ b/tests/test_utils/test_utils_mse.py @@ -29,27 +29,27 @@ from mock import patch import pytest import module_build_service.utils -from module_build_service import models, conf +from module_build_service import models, conf, glib from tests import (db, clean_database) class TestUtilsModuleStreamExpansion: def setup_method(self, test_method): - clean_database() + clean_database(False) - def mocked_context(modulebuild_instance): + def mocked_context(build_context, runtime_context): """ Changes the ModuleBuild.context behaviour to return ModuleBuild.build_context instead of computing new context hash. """ - return modulebuild_instance.build_context + return build_context[:8] # For these tests, we need the ModuleBuild.context to return the well-known # context as we define it in test data. Therefore patch the ModuleBuild.context # to return ModuleBuild.build_context, which we can control. self.modulebuild_context_patcher = patch( - "module_build_service.models.ModuleBuild.context", autospec=True) + "module_build_service.models.ModuleBuild.context_from_contexts") modulebuild_context = self.modulebuild_context_patcher.start() modulebuild_context.side_effect = mocked_context @@ -88,6 +88,13 @@ class TestUtilsModuleStreamExpansion: if not isinstance(build_requires_list, list): build_requires_list = [build_requires_list] + xmd = { + "mbs": { + "buildrequires": [], + "requires": [], + "commit": "ref_%s" % context, + } + } deps_list = [] for requires, build_requires in zip(requires_list, build_requires_list): deps = Modulemd.Dependencies() @@ -97,6 +104,7 @@ class TestUtilsModuleStreamExpansion: deps.add_buildrequires(req_name, req_streams) deps_list.append(deps) mmd.set_dependencies(deps_list) + mmd.set_xmd(glib.dict_values(xmd)) module_build = module_build_service.models.ModuleBuild() module_build.name = name @@ -117,14 +125,16 @@ class TestUtilsModuleStreamExpansion: return module_build - def _get_modules_build_required_by_module_recursively(self, module_build): + def _get_mmds_required_by_module_recursively(self, module_build): """ - Convenience wrapper around get_modules_build_required_by_module_recursively + Convenience wrapper around get_mmds_required_by_module_recursively returning the list with nsvc strings of modules returned by this the wrapped method. 
""" - modules = module_build_service.utils.get_modules_build_required_by_module_recursively( - db.session, module_build.mmd()) + mmd = module_build.mmd() + module_build_service.utils.expand_mse_streams(db.session, mmd) + modules = module_build_service.utils.get_mmds_required_by_module_recursively( + db.session, mmd) nsvcs = [":".join([m.get_name(), m.get_stream(), str(m.get_version()), m.get_context()]) for m in modules] return nsvcs @@ -145,6 +155,133 @@ class TestUtilsModuleStreamExpansion: self._make_module("platform:f28:0:c10", {}, {}) self._make_module("platform:f29:0:c11", {}, {}) + def test_generate_expanded_mmds_context(self): + self._generate_default_modules() + module_build = self._make_module( + "app:1:0:c1", {"gtk": ["1", "2"]}, {"gtk": ["1", "2"]}) + mmds = module_build_service.utils.generate_expanded_mmds( + db.session, module_build.mmd()) + contexts = set([mmd.get_context() for mmd in mmds]) + assert set(['3031e5a5', '6d10e00e']) == contexts + + @pytest.mark.parametrize('requires,build_requires,expected_xmd,expected_buildrequires', [ + ({"gtk": ["1", "2"]}, {"gtk": ["1", "2"]}, + set([ + frozenset(['platform:f28:0:c10', 'gtk:2:0:c4']), + frozenset(['platform:f28:0:c10', 'gtk:1:0:c2']) + ]), + set([ + frozenset(['gtk:1']), + frozenset(['gtk:2']), + ])), + + ({"gtk": ["1"], "foo": ["1"]}, {"gtk": ["1"], "foo": ["1"]}, + set([ + frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10']) + ]), + set([ + frozenset(['foo:1', 'gtk:1']) + ])), + + ({"gtk": ["1"], "foo": ["1"]}, {"gtk": ["1"], "foo": ["1"], "platform": ["f28"]}, + set([ + frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10']) + ]), + set([ + frozenset(['foo:1', 'gtk:1']) + ])), + + ({"gtk": ["-2"], "foo": ["-2"]}, {"gtk": ["-2"], "foo": ["-2"]}, + set([ + frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10']) + ]), + set([ + frozenset(['foo:1', 'gtk:1']) + ])), + + ({"gtk": ["1"], "foo": ["1"]}, {"gtk": ["-1", "1"], "foo": ["-2", "1"]}, + set([ + frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10']) + ]), + set([ + frozenset(['foo:1', 'gtk:1']) + ])), + ]) + def test_generate_expanded_mmds_buildrequires(self, requires, build_requires, + expected_xmd, expected_buildrequires): + self._generate_default_modules() + module_build = self._make_module("app:1:0:c1", requires, build_requires) + mmds = module_build_service.utils.generate_expanded_mmds( + db.session, module_build.mmd()) + + buildrequires_per_mmd_xmd = set() + buildrequires_per_mmd_buildrequires = set() + for mmd in mmds: + xmd = glib.from_variant_dict(mmd.get_xmd()) + br_nsvcs = [] + for name, detail in xmd['mbs']['buildrequires'].items(): + br_nsvcs.append(":".join([ + name, detail["stream"], detail["version"], detail["context"]])) + buildrequires_per_mmd_xmd.add(frozenset(br_nsvcs)) + + assert len(mmd.get_dependencies()) == 1 + + buildrequires = set() + dep = mmd.get_dependencies()[0] + for req_name, req_streams in dep.get_buildrequires().items(): + for req_stream in req_streams.get(): + buildrequires.add(":".join([req_name, req_stream])) + buildrequires_per_mmd_buildrequires.add(frozenset(buildrequires)) + + assert buildrequires_per_mmd_xmd == expected_xmd + assert buildrequires_per_mmd_buildrequires == expected_buildrequires + + @pytest.mark.parametrize('requires,build_requires,expected', [ + ({"gtk": ["1", "2"]}, {"gtk": ["1", "2"]}, + set([ + frozenset(['gtk:1']), + frozenset(['gtk:2']), + ])), + + ({"gtk": ["1", "2"]}, {"gtk": ["1"]}, + set([ + frozenset(['gtk:1', 'gtk:2']), + ])), + + ({"gtk": ["1"], "foo": ["1"]}, {"gtk": ["1"], 
"foo": ["1"]}, + set([ + frozenset(['foo:1', 'gtk:1']), + ])), + + ({"gtk": ["-2"], "foo": ["-2"]}, {"gtk": ["-2"], "foo": ["-2"]}, + set([ + frozenset(['foo:1', 'gtk:1']), + ])), + + ({"gtk": ["-1", "1"], "foo": ["-2", "1"]}, {"gtk": ["-1", "1"], "foo": ["-2", "1"]}, + set([ + frozenset(['foo:1', 'gtk:1']), + ])), + + ]) + def test_generate_expanded_mmds_requires(self, requires, build_requires, expected): + self._generate_default_modules() + module_build = self._make_module("app:1:0:c1", requires, build_requires) + mmds = module_build_service.utils.generate_expanded_mmds( + db.session, module_build.mmd()) + + requires_per_mmd = set() + for mmd in mmds: + assert len(mmd.get_dependencies()) == 1 + requires = set() + dep = mmd.get_dependencies()[0] + for req_name, req_streams in dep.get_requires().items(): + for req_stream in req_streams.get(): + requires.add(":".join([req_name, req_stream])) + requires_per_mmd.add(frozenset(requires)) + + assert requires_per_mmd == expected + @pytest.mark.parametrize('requires,build_requires,expected', [ ({}, {"gtk": ["1", "2"]}, ['platform:f29:0:c11', 'gtk:2:0:c4', 'gtk:2:0:c5', @@ -174,8 +311,7 @@ class TestUtilsModuleStreamExpansion: def test_get_required_modules_simple(self, requires, build_requires, expected): module_build = self._make_module("app:1:0:c1", requires, build_requires) self._generate_default_modules() - nsvcs = self._get_modules_build_required_by_module_recursively(module_build) - print nsvcs + nsvcs = self._get_mmds_required_by_module_recursively(module_build) assert set(nsvcs) == set(expected) def _generate_default_modules_recursion(self): @@ -207,6 +343,5 @@ class TestUtilsModuleStreamExpansion: def test_get_required_modules_recursion(self, requires, build_requires, expected): module_build = self._make_module("app:1:0:c1", requires, build_requires) self._generate_default_modules_recursion() - nsvcs = self._get_modules_build_required_by_module_recursively(module_build) - print nsvcs + nsvcs = self._get_mmds_required_by_module_recursively(module_build) assert set(nsvcs) == set(expected) diff --git a/tests/test_views/test_views.py b/tests/test_views/test_views.py index 5f09fc6..c92c19a 100644 --- a/tests/test_views/test_views.py +++ b/tests/test_views/test_views.py @@ -809,20 +809,6 @@ class TestViews: assert data['error'] == 'Bad Request' @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - def test_submit_build_mse_unsupported(self, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 'testmodule_mse.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') - - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'git://pkgs.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49'})) - data = json.loads(rv.data) - assert data['status'] == 422 - assert data['message'] == 'Module stream expansion is not yet supported in MBS' - assert data['error'] == 'Unprocessable Entity' - - @patch('module_build_service.auth.get_user', return_value=user) def test_submit_build_set_owner(self, mocked_get_user): data = { 'branch': 'master',