#210 Use MBS instead of PDC to lookup modules.
Merged 9 months ago by jkaluza. Opened 9 months ago by jkaluza.
jkaluza/odcs remove-pdc  into  master

file modified
-5

@@ -25,11 +25,6 @@ 

  # RUNROOT settings

  runroot = False

  

- # PDC settings

- pdc_url = '{{ config.pdc_url }}'

- pdc_insecure = {{ config.pdc_insecure }}

- pdc_develop = {{ config.pdc_develop }}

- 

  # PKGSET

  {%- if config.pkgset_source == 'repos' %}

  pkgset_source = 'repos'

@@ -37,7 +37,7 @@ 

  from concurrent.futures import ThreadPoolExecutor

  import glob

  import odcs.server.utils

- import odcs.server.pdc

+ import odcs.server.mbs

  import defusedxml.ElementTree

  

  

@@ -290,20 +290,19 @@ 

      elif compose.source_type == PungiSourceType.MODULE:

          # Resolve the latest release of modules which do not have the release

          # string defined in the compose.source.

-         pdc = odcs.server.pdc.PDC(conf)

+         mbs = odcs.server.mbs.MBS(conf)

          modules = compose.source.split(" ")

  

-         specified_modules = []

+         specified_mbs_modules = []

          for module in modules:

-             variant_dict = pdc.variant_dict_from_str(module)

-             specified_modules.append(pdc.get_latest_module(**variant_dict))

+             specified_mbs_modules += mbs.get_latest_modules(module)

  

          expand = not compose.flags & COMPOSE_FLAGS["no_deps"]

-         new_modules = pdc.validate_module_list(specified_modules, expand=expand)

+         new_mbs_modules = mbs.validate_module_list(specified_mbs_modules, expand=expand)

  

          uids = sorted(

-             "{variant_id}:{variant_version}:{variant_release}".format(**m)

-             for m in new_modules)

+             "{name}:{stream}:{version}:{context}".format(**m)

+             for m in new_mbs_modules)

          compose.source = ' '.join(uids)

  

  

file modified
+4 -12

@@ -114,18 +114,6 @@ 

              'type': int,

              'default': 30,

              'desc': 'Global network retry interval for read/write operations, in seconds.'},

-         'pdc_url': {

-             'type': str,

-             'default': '',

-             'desc': 'PDC URL.'},

-         'pdc_insecure': {

-             'type': bool,

-             'default': False,

-             'desc': 'Allow insecure connection to PDC.'},

-         'pdc_develop': {

-             'type': bool,

-             'default': False,

-             'desc': 'PDC Development mode, basically noauth.'},

          'arches': {

              'type': list,

              'default': ["x86_64"],

@@ -201,6 +189,10 @@ 

              'type': int,

              'default': 72 * 60 * 60,

              'desc': 'Max number of seconds for which the compose is available.'},

+         'mbs_url': {

+             'type': str,

+             'default': "http://localhost/module-build-service",

+             'desc': 'URL to MBS API.'},

          'num_concurrent_pungi': {

              'type': int,

              'default': 2,

@@ -0,0 +1,165 @@ 

+ # -*- coding: utf-8 -*-

+ # Copyright (c) 2017  Red Hat, Inc.

+ #

+ # Permission is hereby granted, free of charge, to any person obtaining a copy

+ # of this software and associated documentation files (the "Software"), to deal

+ # in the Software without restriction, including without limitation the rights

+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell

+ # copies of the Software, and to permit persons to whom the Software is

+ # furnished to do so, subject to the following conditions:

+ #

+ # The above copyright notice and this permission notice shall be included in all

+ # copies or substantial portions of the Software.

+ #

+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR

+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,

+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE

+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER

+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,

+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE

+ # SOFTWARE.

+ #

+ 

+ import requests

+ 

+ import odcs.server.utils

+ from odcs.server import log

+ 

+ import gi

+ gi.require_version('Modulemd', '1.0') # noqa

+ from gi.repository import Modulemd

+ 

+ 

+ class ModuleLookupError(Exception):

+     pass

+ 

+ 

+ class MBS(object):

+     def __init__(self, config):

+         self.mbs_url = config.mbs_url.rstrip("/")

+ 

+     @odcs.server.utils.retry(wait_on=(requests.ConnectionError, ), logger=log)

+     def get_modules(self, **params):

+         url = self.mbs_url + "/1/module-builds/"

+         r = requests.get(url, params=params)

+         r.raise_for_status()

+         return r.json()

+ 

+     def get_latest_modules(self, nsvc):

+         """

+         Query MBS and return the latest version of the module specified by nsvc.

+ 

+         :param nsvc: N:S:V[:C] of a module to include in a compose.

+         :raises ModuleLookupError: if the module couldn't be found

+         :return: list of modules with the latest version (one per context).

+         """

+         params = {

+             "nsvc": nsvc,

+             "state": 5,  # 5 is "ready".

+             "verbose": True,  # Needed to get modulemd in response.

+             "order_desc_by": "version",

+         }

+         modules = self.get_modules(**params)

+ 

+         if not modules["meta"]["total"]:

+             raise ModuleLookupError(

+                 "Failed to find module %s in the MBS." % nsvc)

+ 

+         ret = []

+         # In case the nsvc is just "name:stream", there might be multiple

+         # versions of a module in MBS response. The modules in response are

+         # sorted DESC by version, so the latest module is always the first

+         # one. So simply get the first module and add to `ret` all the next

+         # modules in a response list which have the same version - this

+         # basically adds all the contexts of the module with latest version

+         # to `ret`.

+         for module in modules["items"]:

+             if ret and ret[0]["version"] != module["version"]:

+                 break

+             ret.append(module)

+         return ret

+ 

+     def _add_new_dependencies(self, module_map, modules):

+         """

+         Helper for ``validate_module_list()`` - scans ``modules`` and adds any missing

+         requirements to ``module_map``.

+ 

+         :param module_map: dict mapping module name:stream to module.

+         :param modules: the list of modules to scan for dependencies.

+         :return: a list of any modules that were added to ``module_map``.

+         """

+ 

+         new_modules = []

+         for module in modules:

+             mmd = Modulemd.Module.new_from_string(module['modulemd'])

+             mmd.upgrade()

+ 

+             # Check runtime dependency (name:stream) of a module and if this

+             # dependency is already in module_map/new_modules, do nothing.

+             # But otherwise get the latest module in this name:stream from MBS

+             # and add it to new_modules/module_map.

+             for deps in mmd.get_dependencies():

+                 for name, streams in deps.get_requires().items():

+                     for stream in streams.get():

+                         key = "%s:%s" % (name, stream)

+                         if key not in module_map:

+                             new_module = self.get_latest_modules(key)

+                             new_modules += new_module

+                             module_map[key] = new_module

+ 

+         return new_modules

+ 

+     def validate_module_list(self, modules, expand=True):

+         """

+         Given a list of modules as returned by `get_modules()`, checks that

+         there are no conflicting duplicates, removes any exact duplicates,

+         and if ``expand`` is set, recursively adds in required modules until

+         all dependencies are specified.

+ 

+         :param modules: a list of modules as returned by ``get_modules()`` or

+                 ``get_latest_modules()``

+         :param expand: if required modules should be included in the returned

+                 list.

+         :return: the list of modules with deduplication and expansion.

+         :raises ModuleLookupError: if a required module couldn't be found, or a

+                 conflict occurred when resolving dependencies.

+         """

+ 

+         # List of modules we are going to return.

+         new_modules = []

+         # Temporary dict with "name:stream" as key and list of module dicts

+         # as value.

+         module_map = {}

+ 

+         for module in modules:

+             key = "%s:%s" % (module['name'], module['stream'])

+ 

+             # If this module is not in `new_modules` yet, add it there and

+             # continue to next module.

+             if key not in module_map:

+                 module_map[key] = [module]

+                 new_modules.append(module)

+                 continue

+ 

+             # Check if there is already this module in new_modules, but in

+             # different version. If so, raise an exception.

+             old_modules = module_map[key]

+             if (module['version'] != old_modules[0]['version']):

+                 raise ModuleLookupError(

+                     "%s:%s:%s:%s conflicts with %s:%s:%s:%s" % (

+                         module['name'], module["stream"], module["version"],

+                         module["context"], old_modules[0]['name'],

+                         old_modules[0]["stream"], old_modules[0]["version"],

+                         old_modules[0]["context"]))

+             else:

+                 module_map[key].append(module)

+ 

+         if expand:

+             added_module_list = new_modules

+             while True:

+                 added_module_list = self._add_new_dependencies(module_map, added_module_list)

+                 if len(added_module_list) == 0:

+                     break

+                 new_modules.extend(added_module_list)

+ 

+         return new_modules

file removed
-252

@@ -1,252 +0,0 @@ 

- # -*- coding: utf-8 -*-

- # Copyright (c) 2017  Red Hat, Inc.

- #

- # Permission is hereby granted, free of charge, to any person obtaining a copy

- # of this software and associated documentation files (the "Software"), to deal

- # in the Software without restriction, including without limitation the rights

- # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell

- # copies of the Software, and to permit persons to whom the Software is

- # furnished to do so, subject to the following conditions:

- #

- # The above copyright notice and this permission notice shall be included in all

- # copies or substantial portions of the Software.

- #

- # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR

- # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,

- # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE

- # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER

- # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,

- # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE

- # SOFTWARE.

- #

- 

- import inspect

- import requests

- import re

- 

- from pdc_client import PDCClient

- from beanbag.bbexcept import BeanBagException

- 

- import odcs.server.utils

- from odcs.server import log

- 

- import gi

- gi.require_version('Modulemd', '1.0') # noqa

- from gi.repository import Modulemd

- 

- 

- class ModuleLookupError(Exception):

-     pass

- 

- 

- class PDC(object):

-     def __init__(self, config):

-         # pdc_url, pdc_develop and pdc_insecure should be avaiable in config

-         self.config = config

-         self.session = self.get_client_session()

- 

-     def get_client_session(self):

-         """

-         Return pdc_client.PDCClient instance

-         """

-         if 'ssl_verify' in inspect.getargspec(PDCClient.__init__).args:

-             # New API

-             return PDCClient(

-                 server=self.config.pdc_url,

-                 develop=self.config.pdc_develop,

-                 ssl_verify=not self.config.pdc_insecure,

-             )

-         else:

-             # Old API

-             return PDCClient(

-                 server=self.config.pdc_url,

-                 develop=self.config.pdc_develop,

-                 insecure=self.config.pdc_insecure,

-             )

- 

-     def variant_dict_from_str(self, module_str):

-         """

-         Method which parses module NSV string and returns a module info

-         dictionary instead.

- 

-         For more information about format of module_str, read:

-         https://pagure.io/modularity/blob/master/f/source/development/

-         building-modules/naming-policy.rst

- 

-         ODCS supports only N:S and N:S:V, because other combinations do not

-         have sense for composes.

- 

-         :param str module_str: string, the NS(V) of module

-         """

- 

-         # The new format can be distinguished by colon in module_str, because

-         # there is not module in Fedora with colon in a name or stream and it is

-         # now disallowed to create one. So if colon is there, it must be new

-         # naming policy format.

-         if module_str.find(":") != -1:

-             module_info = {}

-             module_info['variant_type'] = 'module'

- 

-             nsv = module_str.split(":")

-             if len(nsv) > 3:

-                 raise ValueError(

-                     "Module string \"%s\" is not allowed. "

-                     "Only NAME:STREAM or NAME:STREAM:VERSION is allowed.")

-             if len(nsv) > 2:

-                 module_info["variant_release"] = nsv[2]

-             if len(nsv) > 1:

-                 module_info["variant_version"] = nsv[1]

-             module_info["variant_id"] = nsv[0]

-             return module_info

-         else:

-             # Fallback to previous old format with '-' delimiter.

-             log.warn(

-                 "Variant file uses old format of module definition with '-'"

-                 "delimiter, please switch to official format defined by "

-                 "Modules Naming Policy.")

- 

-             module_info = {}

-             # The regex is matching a string which should represent the release number

-             # of a module. The release number is in format: "%Y%m%d%H%M%S"

-             release_regex = re.compile("^(\d){14}$")

- 

-             section_start = module_str.rfind('-')

-             module_str_first_part = module_str[section_start + 1:]

-             if release_regex.match(module_str_first_part):

-                 module_info['variant_release'] = module_str_first_part

-                 module_str = module_str[:section_start]

-                 section_start = module_str.rfind('-')

-                 module_info['variant_version'] = module_str[section_start + 1:]

-             else:

-                 module_info['variant_version'] = module_str_first_part

-             module_info['variant_id'] = module_str[:section_start]

-             module_info['variant_type'] = 'module'

- 

-             return module_info

- 

-     def get_latest_module(self, **kwargs):

-         """

-         Query PDC and return the latest version of the module specified by kwargs

- 

-         :param kwargs: query parameters in keyword arguments, should only provide

-                     valid query parameters supported by PDC's module query API.

-                     Must include 'variant_id' and 'variant_version'.

-         :raises ModuleLookupError: if the module couldn't be found

-         :return: the latest version of the module.

-         """

-         if 'active' not in kwargs:

-             kwargs['active'] = True

- 

-         if 'variant_release' not in kwargs:

-             # Ordering doesn't work

-             # https://github.com/product-definition-center/product-definition-center/issues/439,

-             # so if a release isn't specified, we have to get all builds and sort ourselves.

-             # We do this two-step to avoid downloading modulemd for all builds.

-             retval = self.get_modules(fields=['variant_release'], **kwargs)

-             if not retval:

-                 raise ModuleLookupError(

-                     "Failed to find module {variant_id}-{variant_version} in the PDC."

-                     .format(**kwargs))

-             kwargs['variant_release'] = str(max(int(d['variant_release']) for d in retval))

- 

-         retval = self.get_modules(**kwargs)

-         if not retval:

-             raise ModuleLookupError(

-                 "Failed to find module {variant_id}-{variant_version}-{variant_release} in the PDC."

-                 .format(**kwargs))

-         if len(retval) > 1:

-             raise ModuleLookupError(

-                 "Multiple modules found in the PDC for "

-                 "{variant_id}-{variant_version}-{variant_release}. "

-                 "This shouldn't happen, please contact the ODCS maintainers."

-                 .format(**kwargs))

-         return retval[0]

- 

-     def _add_new_dependencies(self, module_map, modules):

-         """

-         Helper for ``validate_module_list()`` - scans ``modules`` and adds any missing

-         requirements to ``module_map``.

- 

-         :param module_map: dict mapping module name:stream to module.

-         :param modules: the list of modules to scan for dependencies.

-         :return: a list of any modules that were added to ``module_map``.

-         """

- 

-         new_modules = []

-         for module in modules:

-             mmd = Modulemd.Module.new_from_string(module['modulemd'])

-             mmd.upgrade()

- 

-             # Check runtime dependency (name:stream) of a module and if this

-             # dependency is already in module_map/new_modules, do nothing.

-             # But otherwise get the latest module in this name:stream from PDC

-             # and add it to new_modules/module_map.

-             for deps in mmd.get_dependencies():

-                 for name, streams in deps.get_requires().items():

-                     for stream in streams.get():

-                         key = "%s:%s" % (name, stream)

-                         if key not in module_map:

-                             new_module = self.get_latest_module(

-                                 variant_id=name, variant_version=stream)

-                             new_modules.append(new_module)

-                             module_map[key] = new_module

- 

-         return new_modules

- 

-     def validate_module_list(self, modules, expand=True):

-         """

-         Given a list of modules, checks that there are no conflicting duplicates,

-         removes any exact duplicates, and if ``expand`` is set, recursively adds

-         in required modules until all dependencies are specified.

- 

-         :param modules: a list of modules as returned by ``get_modules()`` or

-                 ``get_latest_module()``

-         :param expand: if required modules should be included in the returned

-                 list.

-         :return: the list of modules with deduplication and expansion.

-         :raises ModuleLookupError: if a required module couldn't be found, or a

-                 conflict occurred when resolving dependencies.

-         """

- 

-         # List of modules we are going to return.

-         new_modules = []

-         # Temporary dict with "name:stream" as key and module dict as value.

-         module_map = {}

- 

-         for module in modules:

-             key = "%s:%s" % (module['variant_id'], module['variant_version'])

- 

-             # If this module is not in `new_modules` yet, add it there and

-             # continue to next module.

-             if key not in module_map:

-                 module_map[key] = module

-                 new_modules.append(module)

-                 continue

- 

-             # Check if there is already this module in new_modules, but in

-             # different version. If so, raise an exception.

-             old_module = module_map[key]

-             if (module['variant_release'] != old_module['variant_release']):

-                 raise ModuleLookupError("%s conflicts with %s" % (module['variant_uid'],

-                                                                   old_module['variant_uid']))

- 

-         if expand:

-             added_module_list = new_modules

-             while True:

-                 added_module_list = self._add_new_dependencies(module_map, added_module_list)

-                 if len(added_module_list) == 0:

-                     break

-                 new_modules.extend(added_module_list)

- 

-         return new_modules

- 

-     @odcs.server.utils.retry(wait_on=(requests.ConnectionError, BeanBagException), logger=log)

-     def get_modules(self, **kwargs):

-         """

-         Query PDC with specified query parameters and return a list of modules.

- 

-         :param kwargs: query parameters in keyword arguments

-         :return: a list of modules

-         """

-         modules = self.session['unreleasedvariants/'](page_size=-1, **kwargs)

-         return modules

@@ -45,9 +45,6 @@ 

          self.release_version = release_version

          self.bootable = False

          self.sigkeys = sigkeys.split(" ") if sigkeys else []

-         self.pdc_url = conf.pdc_url

-         self.pdc_insecure = conf.pdc_insecure

-         self.pdc_develop = conf.pdc_develop

          self.source_type = source_type

          self.source = source

          self.koji_profile = conf.koji_profile

file modified
+7 -1

@@ -221,7 +221,7 @@ 

              log.error(err)

              raise ValueError(err)

  

-         # Validate RAW_CONFIG source_type.

+         # Validate `source` based on `source_type`.

          if source_type == PungiSourceType.RAW_CONFIG:

              if len(source) > 1:

                  raise ValueError(

@@ -240,6 +240,12 @@ 

                  raise ValueError(

                      'Source "%s" does not exist in server configuration.' %

                      source_name)

+         elif source_type == PungiSourceType.MODULE:

+             for module_str in source:

+                 if len(module_str.split(":")) < 2:

+                     raise ValueError(

+                         'Module definition must be in "n:s", "n:s:v" or '

+                         '"n:s:v:c" format, but got %s' % module_str)

  

          source = ' '.join(source)

  

server/tests/mbs.py server/tests/pdc.py
file renamed
+28 -33

@@ -19,7 +19,7 @@ 

  # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE

  # SOFTWARE.

  #

- # Written by Owen Taylor <otaylor@redhat.com>

+ # Written by Jan Kaluza <jkaluza@redhat.com>

  

  from functools import wraps

  import json

@@ -54,15 +54,15 @@ 

          mmd.set_dependencies((deps, ))

  

      return {

-         'variant_id': name,

-         'variant_version': stream,

-         'variant_release': str(version),

-         'variant_uid': name + '-' + stream + '-' + str(version),

+         'name': name,

+         'stream': stream,

+         'version': str(version),

+         'context': '00000000',

          'modulemd': mmd.dumps()

      }

  

  

- TEST_PDC_MODULES_MMDv1 = [

+ TEST_MBS_MODULES_MMDv1 = [

      # test_backend.py

      make_module('moduleA', 'f26', 20170809000000,

                  {'moduleB': 'f26'}),

@@ -85,7 +85,7 @@ 

  ]

  

  

- TEST_PDC_MODULES_MMDv2 = [

+ TEST_MBS_MODULES_MMDv2 = [

      # test_backend.py

      make_module('moduleA', 'f26', 20170809000000,

                  {'moduleB': 'f26'}, 2),

@@ -108,7 +108,7 @@ 

  ]

  

  

- def mock_pdc(mdversion=2):

+ def mock_mbs(mdversion=2):

      """

      Decorator that sets up a test environment so that calls to the MBS to look up

      modules are redirected to return results from the TEST_MBS_MODULES arrays above.

@@ -116,43 +116,38 @@ 

      def wrapper(f):

          @wraps(f)

          def wrapped(*args, **kwargs):

-             def handle_unreleasedvariants(request):

+             def handle_module_builds(request):

                  query = parse_qs(urlparse(request.url).query)

-                 variant_id = query['variant_id']

-                 variant_version = query['variant_version']

-                 variant_release = query.get('variant_release', None)

+                 nsvc = query['nsvc'][0]

+                 nsvc_parts = nsvc.split(":")

+                 nsvc_keys = ["name", "stream", "version", "context"]

+                 nsvc_dict = {}

+                 for key, part in zip(nsvc_keys, nsvc_parts):

+                     nsvc_dict[key] = part

  

                  if mdversion == 1:

-                     modules = TEST_PDC_MODULES_MMDv1

+                     modules = TEST_MBS_MODULES_MMDv1

                  else:

-                     modules = TEST_PDC_MODULES_MMDv2

+                     modules = TEST_MBS_MODULES_MMDv2

  

-                 body = []

+                 body = {"items": [], "meta": {"total": 0}}

                  for module in modules:

-                     if module['variant_id'] not in variant_id:

+                     skip = False

+                     for key in nsvc_keys:

+                         if key in nsvc_dict and nsvc_dict[key] != module[key]:

+                             skip = True

+                             break

+                     if skip:

                          continue

-                     if module['variant_version'] not in variant_version:

-                         continue

-                     if variant_release is not None:

-                         if module['variant_release'] not in variant_release:

-                             continue

- 

-                     fields = query.get('fields', None)

-                     if fields is not None:

-                         return_module = {}

-                         for field in fields:

-                             return_module[field] = module[field]

-                     else:

-                         return_module = module

- 

-                     body.append(return_module)

+                     body["items"].append(module)

  

+                 body["meta"]["total"] = len(body["items"])

                  return (200, {}, json.dumps(body))

  

              responses.add_callback(

-                 responses.GET, conf.pdc_url + '/unreleasedvariants/',

+                 responses.GET, conf.mbs_url + '/1/module-builds/',

                  content_type='application/json',

-                 callback=handle_unreleasedvariants)

+                 callback=handle_module_builds)

  

              return f(*args, **kwargs)

  

file modified
+31 -31

@@ -29,7 +29,7 @@ 

  from odcs.server import db

  from odcs.server.models import Compose

  from odcs.common.types import COMPOSE_FLAGS, COMPOSE_RESULTS, COMPOSE_STATES

- from odcs.server.pdc import ModuleLookupError

+ from odcs.server.mbs import ModuleLookupError

  from odcs.server.pungi import PungiSourceType

  from odcs.server.backend import (resolve_compose, get_reusable_compose,

                                   generate_compose, generate_pulp_compose,

@@ -38,7 +38,7 @@ 

  import odcs.server.backend

  from .utils import ModelsBaseTest

  

- from .pdc import mock_pdc

+ from .mbs import mock_mbs

  

  thisdir = os.path.abspath(os.path.dirname(__file__))

  

@@ -57,11 +57,11 @@ 

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.koji_event, 1496834159)

  

-     @mock_pdc()

+     @mock_mbs()

      def test_resolve_compose_module(self):

          c = Compose.create(

              db.session, "me", PungiSourceType.MODULE,

-             "moduleA-f26",

+             "moduleA:f26",

              COMPOSE_RESULTS["repository"], 3600)

          db.session.commit()

  

@@ -70,16 +70,16 @@ 

  

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.source,

-                          ' '.join(["moduleA:f26:20170809000000",

-                                    "moduleB:f26:20170808000000",

-                                    "moduleC:f26:20170807000000",

-                                    "moduleD:f26:20170806000000"]))

+                          ' '.join(["moduleA:f26:20170809000000:00000000",

+                                    "moduleB:f26:20170808000000:00000000",

+                                    "moduleC:f26:20170807000000:00000000",

+                                    "moduleD:f26:20170806000000:00000000"]))

  

-     @mock_pdc()

+     @mock_mbs()

      def test_resolve_compose_module_no_deps(self):

          c = Compose.create(

              db.session, "me", PungiSourceType.MODULE,

-             "moduleA-f26 moduleA-f26",

+             "moduleA:f26 moduleA:f26",

              COMPOSE_RESULTS["repository"], 3600,

              flags=COMPOSE_FLAGS["no_deps"])

          db.session.commit()

@@ -88,9 +88,9 @@ 

          db.session.commit()

  

          c = db.session.query(Compose).filter(Compose.id == 1).one()

-         self.assertEqual(c.source, "moduleA:f26:20170809000000")

+         self.assertEqual(c.source, "moduleA:f26:20170809000000:00000000")

  

-     @mock_pdc()

+     @mock_mbs()

      def expect_module_lookup_error(self, source, match, flags=0):

          c = Compose.create(

              db.session, "me", PungiSourceType.MODULE,

@@ -102,11 +102,11 @@ 

          with self.assertRaisesRegexp(ModuleLookupError, match):

              resolve_compose(c)

  

-     @mock_pdc(1)

+     @mock_mbs(1)

      def test_resolve_compose_module_mmdv1(self):

          c = Compose.create(

              db.session, "me", PungiSourceType.MODULE,

-             "moduleA-f26",

+             "moduleA:f26",

              COMPOSE_RESULTS["repository"], 3600)

          db.session.commit()

  

@@ -115,16 +115,16 @@ 

  

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.source,

-                          ' '.join(["moduleA:f26:20170809000000",

-                                    "moduleB:f26:20170808000000",

-                                    "moduleC:f26:20170807000000",

-                                    "moduleD:f26:20170806000000"]))

+                          ' '.join(["moduleA:f26:20170809000000:00000000",

+                                    "moduleB:f26:20170808000000:00000000",

+                                    "moduleC:f26:20170807000000:00000000",

+                                    "moduleD:f26:20170806000000:00000000"]))

  

-     @mock_pdc(1)

+     @mock_mbs(1)

      def test_resolve_compose_module_no_deps_mmdv1(self):

          c = Compose.create(

              db.session, "me", PungiSourceType.MODULE,

-             "moduleA-f26 moduleA-f26",

+             "moduleA:f26 moduleA:f26",

              COMPOSE_RESULTS["repository"], 3600,

              flags=COMPOSE_FLAGS["no_deps"])

          db.session.commit()

@@ -133,9 +133,9 @@ 

          db.session.commit()

  

          c = db.session.query(Compose).filter(Compose.id == 1).one()

-         self.assertEqual(c.source, "moduleA:f26:20170809000000")

+         self.assertEqual(c.source, "moduleA:f26:20170809000000:00000000")

  

-     @mock_pdc(1)

+     @mock_mbs(1)

      def expect_module_lookup_error_mmdv1(self, source, match, flags=0):

          c = Compose.create(

              db.session, "me", PungiSourceType.MODULE,

@@ -148,34 +148,34 @@ 

              resolve_compose(c)

  

      def test_resolve_compose_module_not_found(self):

-         self.expect_module_lookup_error("moduleA-f30",

+         self.expect_module_lookup_error("moduleA:f30",

                                          "Failed to find")

  

      def test_resolve_compose_module_not_found2(self):

-         self.expect_module_lookup_error("moduleA-f26-00000000000000",

+         self.expect_module_lookup_error("moduleA:f26:00000000000000",

                                          "Failed to find")

  

      def test_resolve_compose_module_conflict(self):

          self.expect_module_lookup_error(

-             "moduleA-f26-20170809000000 moduleA-f26-20170805000000",

+             "moduleA:f26:20170809000000 moduleA:f26:20170805000000",

              "conflicts with")

  

-     @mock_pdc()

+     @mock_mbs()

      def test_resolve_compose_module_not_conflict(self):

          c = Compose.create(

              db.session, "me", PungiSourceType.MODULE,

-             "moduleB-f26 moduleB-f27",

+             "moduleB:f26 moduleB:f27",

              COMPOSE_RESULTS["repository"], 3600,

              flags=COMPOSE_FLAGS["no_deps"])

          db.session.commit()

  

          resolve_compose(c)

  

-     @mock_pdc(1)

+     @mock_mbs(1)

      def test_resolve_compose_module_not_conflict_mmdv1(self):

          c = Compose.create(

              db.session, "me", PungiSourceType.MODULE,

-             "moduleB-f26 moduleB-f27",

+             "moduleB:f26 moduleB:f27",

              COMPOSE_RESULTS["repository"], 3600,

              flags=COMPOSE_FLAGS["no_deps"])

          db.session.commit()

@@ -184,8 +184,8 @@ 

  

      def test_resolve_compose_module_dep_not_found(self):

          self.expect_module_lookup_error(

-             "moduleB-f26 moduleB-f27",

-             "Failed to find module moduleC-f27 in the PDC.")

+             "moduleB:f26 moduleB:f27",

+             "Failed to find module moduleC:f27 in the MBS.")

  

      @patch("odcs.server.backend.create_koji_session")

      def test_resolve_compose_repo_no_override_koji_event(

@@ -33,7 +33,7 @@ 

  from odcs.server.pungi import PungiSourceType

  

  from .utils import ModelsBaseTest

- from .pdc import mock_pdc

+ from .mbs import mock_mbs

  

  thisdir = os.path.abspath(os.path.dirname(__file__))

  

@@ -98,7 +98,7 @@ 

          db.session.add(c)

          db.session.commit()

  

-     @mock_pdc

+     @mock_mbs

      @patch("odcs.server.utils.execute_cmd")

      @patch("odcs.server.backend._write_repo_file")

      def test_submit_build(self, wrf, execute_cmd):

@@ -116,7 +116,7 @@ 

          self.assertEqual(c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary")

          self.assertEqual(self.composer.currently_generating, [1])

  

-     @mock_pdc

+     @mock_mbs

      @patch("odcs.server.utils.execute_cmd")

      @patch("odcs.server.backend._write_repo_file")

      def test_submit_build_module_without_release(

@@ -133,7 +133,7 @@ 

          self.assertEqual(c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary")

          self.assertEqual(c.source, "testmodule:master:20170515074419")

  

-     @mock_pdc

+     @mock_mbs

      @patch("odcs.server.utils.execute_cmd")

      @patch("odcs.server.backend._write_repo_file")

      def test_submit_build_colon_separator(self, wrf, execute_cmd):

@@ -151,7 +151,7 @@ 

          self.assertEqual(c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary")

          self.assertEqual(self.composer.currently_generating, [1])

  

-     @mock_pdc

+     @mock_mbs

      @patch("odcs.server.utils.execute_cmd")

      @patch("odcs.server.backend._write_repo_file")

      def test_submit_build_module_without_release_colon_separator(

@@ -168,10 +168,10 @@ 

          self.assertEqual(c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary")

          self.assertEqual(c.source, "testmodule:master:20170515074419")

  

-     @mock_pdc

+     @mock_mbs

      @patch("odcs.server.utils.execute_cmd")

      @patch("odcs.server.backend._write_repo_file")

-     def test_submit_build_module_without_release_not_in_pdc(

+     def test_submit_build_module_without_release_not_in_mbs(

              self, wrf, execute_cmd):

  

          self._add_module_compose("testmodule2-master")

@@ -182,7 +182,7 @@ 

          c = self._wait_for_compose_state(1, COMPOSE_STATES["failed"])

          self.assertEqual(c.state, COMPOSE_STATES["failed"])

  

-     @mock_pdc

+     @mock_mbs

      @patch("odcs.server.backend.validate_pungi_compose")

      def test_submit_build_reuse_repo(self, mock_validate_pungi_compose):

          self._add_repo_composes()

@@ -197,7 +197,7 @@ 

          self.assertEqual(c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary")

          mock_validate_pungi_compose.assert_called_once()

  

-     @mock_pdc

+     @mock_mbs

      def test_submit_build_reuse_module(self):

          self._add_module_compose()

          self._add_module_compose()

@@ -214,7 +214,7 @@ 

                           os.path.join(odcs.server.conf.target_dir, "latest-odcs-1-1/compose/Temporary"))

          self.assertEqual(c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary")

  

-     @mock_pdc

+     @mock_mbs

      @patch("odcs.server.utils.execute_cmd")

      @patch("odcs.server.backend._write_repo_file")

      def test_submit_build_no_reuse_module(self, wrf, execute_cmd):

file modified
+33 -19

@@ -243,7 +243,7 @@ 

                                           hour=0, minute=0, second=0)

          with freeze_time(self.initial_datetime):

              self.c1 = Compose.create(

-                 db.session, "unknown", PungiSourceType.MODULE, "testmodule-master",

+                 db.session, "unknown", PungiSourceType.MODULE, "testmodule:master",

                  COMPOSE_RESULTS["repository"], 60)

              self.c2 = Compose.create(

                  db.session, "me", PungiSourceType.KOJI_TAG, "f26",

@@ -279,13 +279,13 @@ 

              ]

  

              rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule-master'}}))

+                 {'source': {'type': 'module', 'source': 'testmodule:master'}}))

              data = json.loads(rv.get_data(as_text=True))

  

          expected_json = {'source_type': 2, 'state': 0, 'time_done': None,

                           'state_name': 'wait',

                           'state_reason': None,

-                          'source': u'testmodule-master',

+                          'source': u'testmodule:master',

                           'owner': u'dev',

                           'result_repo': 'http://localhost/odcs/latest-odcs-%d-1/compose/Temporary' % data['id'],

                           'result_repofile': 'http://localhost/odcs/latest-odcs-%d-1/compose/Temporary/odcs-%d.repo' % (data['id'], data['id']),

@@ -444,7 +444,7 @@ 

  

          self.assertEqual(data['id'], 3)

          self.assertEqual(data['state_name'], 'wait')

-         self.assertEqual(data['source'], 'testmodule-master')

+         self.assertEqual(data['source'], 'testmodule:master')

          self.assertEqual(data['time_removed'], None)

  

          c = db.session.query(Compose).filter(Compose.id == 3).one()

@@ -465,7 +465,7 @@ 

  

          self.assertEqual(data['id'], 3)

          self.assertEqual(data['state_name'], 'wait')

-         self.assertEqual(data['source'], 'testmodule-master')

+         self.assertEqual(data['source'], 'testmodule:master')

          self.assertEqual(data['time_removed'], None)

  

          c = db.session.query(Compose).filter(Compose.id == 3).one()

@@ -520,6 +520,20 @@ 

          self.assertEqual(

              data['message'], 'Unknown source type "unknown"')

  

+     def test_submit_module_build_wrong_source(self):

+         with self.test_request_context(user='dev2'):

+             flask.g.oidc_scopes = [

+                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+             ]

+ 

+             rv = self.client.post('/api/1/composes/', data=json.dumps(

+                 {'source': {'type': 'module', 'source': 'testmodule:master x'}}))

+             data = json.loads(rv.get_data(as_text=True))

+ 

+         self.assertEqual(

+             data["message"], 'Module definition must be in "n:s", "n:s:v" or '

+             '"n:s:v:c" format, but got x')

+ 

      def test_submit_build_per_user_source_type_allowed(self):

          with self.test_request_context(user='dev2'):

              flask.g.oidc_scopes = [

@@ -527,7 +541,7 @@ 

              ]

  

              rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': '/path'}}))

+                 {'source': {'type': 'module', 'source': 'testmodule:master'}}))

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(data["state_name"], "wait")

@@ -554,7 +568,7 @@ 

              ]

  

              rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': '/path'}}))

+                 {'source': {'type': 'module', 'source': 'testmodule:master'}}))

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(data["state_name"], "wait")

@@ -578,7 +592,7 @@ 

          resp = self.client.get('/api/1/composes/1')

          data = json.loads(resp.get_data(as_text=True))

          self.assertEqual(data['id'], 1)

-         self.assertEqual(data['source'], "testmodule-master")

+         self.assertEqual(data['source'], "testmodule:master")

  

      def test_query_composes(self):

          resp = self.client.get('/api/1/composes/')

@@ -641,7 +655,7 @@ 

      def test_delete_compose(self):

          with freeze_time(self.initial_datetime) as frozen_datetime:

              c3 = Compose.create(

-                 db.session, "unknown", PungiSourceType.MODULE, "testmodule-master",

+                 db.session, "unknown", PungiSourceType.MODULE, "testmodule:master",

                  COMPOSE_RESULTS["repository"], 60)

              c3.state = COMPOSE_STATES['done']

              db.session.add(c3)

@@ -675,7 +689,7 @@ 

          for state in COMPOSE_STATES.keys():

              if state not in ['done', 'failed']:

                  new_c = Compose.create(

-                     db.session, "unknown", PungiSourceType.MODULE, "testmodule-master",

+                     db.session, "unknown", PungiSourceType.MODULE, "testmodule:master",

                      COMPOSE_RESULTS["repository"], 60)

                  new_c.state = COMPOSE_STATES[state]

                  db.session.add(new_c)

@@ -731,7 +745,7 @@ 

              ]

  

              resp = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule-master'}}))

+                 {'source': {'type': 'module', 'source': 'testmodule:master'}}))

              data = json.loads(resp.get_data(as_text=True))

  

          self.assertEqual(resp.status, '403 FORBIDDEN')

@@ -746,17 +760,17 @@ 

              ]

  

              resp = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule-rawhide'}}))

+                 {'source': {'type': 'module', 'source': 'testmodule:rawhide'}}))

          db.session.expire_all()

  

          self.assertEqual(resp.status, '200 OK')

          self.assertEqual(resp.status_code, 200)

-         c = db.session.query(Compose).filter(Compose.source == 'testmodule-rawhide').one()

+         c = db.session.query(Compose).filter(Compose.source == 'testmodule:rawhide').one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

  

      def test_can_delete_compose_with_user_in_configured_groups(self):

          c3 = Compose.create(

-             db.session, "unknown", PungiSourceType.MODULE, "testmodule-testbranch",

+             db.session, "unknown", PungiSourceType.MODULE, "testmodule:testbranch",

              COMPOSE_RESULTS["repository"], 60)

          c3.state = COMPOSE_STATES['done']

          db.session.add(c3)

@@ -790,7 +804,7 @@ 

              ]

  

              rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule-master'}, 'seconds-to-live': 60 * 60 * 12}))

+                 {'source': {'type': 'module', 'source': 'testmodule:master'}, 'seconds-to-live': 60 * 60 * 12}))

              data = json.loads(rv.get_data(as_text=True))

  

          time_submitted = datetime.strptime(data['time_submitted'], "%Y-%m-%dT%H:%M:%SZ")

@@ -812,7 +826,7 @@ 

              ]

  

              rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule-master'}, 'seconds-to-live': 60 * 60 * 24 * 7}))

+                 {'source': {'type': 'module', 'source': 'testmodule:master'}, 'seconds-to-live': 60 * 60 * 24 * 7}))

              data = json.loads(rv.get_data(as_text=True))

  

          time_submitted = datetime.strptime(data['time_submitted'], "%Y-%m-%dT%H:%M:%SZ")

@@ -833,7 +847,7 @@ 

              ]

  

              rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule-master'}}))

+                 {'source': {'type': 'module', 'source': 'testmodule:master'}}))

              data = json.loads(rv.get_data(as_text=True))

  

          time_submitted = datetime.strptime(data['time_submitted'], "%Y-%m-%dT%H:%M:%SZ")

@@ -847,13 +861,13 @@ 

  

          with self.test_request_context():

              rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule-master'}}))

+                 {'source': {'type': 'module', 'source': 'testmodule:master'}}))

              data = json.loads(rv.get_data(as_text=True))

  

          expected_json = {'source_type': 2, 'state': 0, 'time_done': None,

                           'state_name': 'wait',

                           'state_reason': None,

-                          'source': u'testmodule-master',

+                          'source': u'testmodule:master',

                           'owner': u'unknown',

                           'result_repo': 'http://localhost/odcs/latest-odcs-%d-1/compose/Temporary' % data['id'],

                           'result_repofile': 'http://localhost/odcs/latest-odcs-%d-1/compose/Temporary/odcs-%d.repo' % (data['id'], data['id']),

In this PR, ODCS starts using MBS API instead of PDC API.

Does it make sense to allow the user to request a compose with modules that only have a name specified? I think at least the name and stream should be required.

You can actually use "ready" directly if you want.

This could be:

if not modules["meta"]["total"]:

So same version but different context is ok?

I agree with @qwan. :thumbsup: otherwise.

Yes, we pass all the contexts to Pungi when the context is not specified, but we should only pass a single version to it.

1 new commit added

  • Allow only modules in n:s, n:s:v or n:s:v:c format as input.
9 months ago

Pull-Request has been merged by jkaluza

9 months ago