#1543 Rebase v3 branch on top of latest master branch
Opened 4 years ago by qwan. Closed 4 years ago by qwan.
qwan/fm-orchestrator rebase-v3 into v3

file modified
+1
@@ -24,3 +24,4 @@ 

  mbstest.db

  htmlcov/

  test.env.yaml

+ report.html

file modified
+24 -73
@@ -7,73 +7,36 @@ 

      echo "export MODULE_BUILD_SERVICE_DEVELOPER_ENV=1" > /etc/profile.d/module_build_service_developer_env.sh

      source /etc/profile.d/module_build_service_developer_env.sh

      dnf install -y \

-         fedmsg-hub \

-         fedmsg-relay \

-         fedpkg \

-         gcc \

-         gcc-c++ \

-         git \

-         koji \

-         krb5-devel \

-         krb5-workstation \

-         libffi-devel \

-         mock-scm \

-         openssl-devel \

-         python \

-         python-devel \

-         python2-dnf \

-         python-docutils \

-         python-flask \

-         python2-libmodulemd \

-         python-m2ext \

-         python-mock \

-         python-qpid \

-         python-solv \

-         python-sqlalchemy \

-         python-futures \

-         python2-pungi \

-         python3 \

-         python3-devel \

-         python3-docutils \

-         python3-pungi \

-         python3-virtualenv \

-         redhat-rpm-config \

-         redhat-rpm-config \

-         rpm-build \

-         swig \

-         sqlite \

-         bash-completion \

-         wget \

-         which

- 

+       bash-completion \

+       python3-celery \

+       python3-flake8 \

+       python3-mock \

+       python3-pytest \

+       python3-pytest-cov \

+       python3-tox \

+       rpm-build \

+       sqlite

+     # Install the runtime dependencies from the module-build-service spec file

+     curl -s https://src.fedoraproject.org/rpms/module-build-service/raw/master/f/module-build-service.spec -o /tmp/module-build-service.spec

+     dnf install -y $(rpmspec --parse /tmp/module-build-service.spec | grep ^Requires: | tr -s ' ' | cut -d ' ' -f2)

      mbs_config_dir=/etc/module-build-service

      [ -e "$mbs_config_dir" ] || mkdir "$mbs_config_dir"

-     cp -r /opt/module_build_service/conf/* "$mbs_config_dir"

+     cd /opt/module_build_service

+     cp -r conf/* "$mbs_config_dir"

+ 

+     # Workaround because python3-koji has no egg-info file

+     sed -i '/koji/d' requirements.txt

+     # Remove Python 2 only dependencies

+     sed -i '/futures/d' requirements.txt

+     sed -i '/enum34/d' requirements.txt

+ 

+     python3 setup.py develop --no-deps

+     python3 setup.py egg_info

  SCRIPT

  

  $make_devenv = <<DEVENV

    set -e

-   env_dir=~/devenv

-   pip=${env_dir}/bin/pip

-   py=${env_dir}/bin/python

    code_dir=/opt/module_build_service

- 

-   test -e $env_dir && rm -rf $env_dir

- 

-   # solv is not availabe from pypi.org. libsolv has to be installed by dnf.

-   (cd; virtualenv -p python2 --system-site-packages devenv)

- 

-   $pip install --upgrade pip kobo

-   $pip install -r $code_dir/test-requirements.txt

-   $pip install ipython

- 

-   cd $code_dir

-   $py setup.py develop

-   $py setup.py egg_info

- 

-   if ! grep ". $env_dir/bin/activate" ~/.bashrc >/dev/null; then

-       echo ". $env_dir/bin/activate" >> ~/.bashrc

-   fi

    if ! grep "^cd $code_dir" ~/.bashrc >/dev/null; then

        # Go to working directory after login

        echo "cd $code_dir" >> ~/.bashrc
@@ -81,7 +44,7 @@ 

  DEVENV

  

  $config_pgsql = <<PGSQL

- dnf install -y postgresql postgresql-server python2-psycopg2

+ dnf install -y postgresql postgresql-server python3-psycopg2

  

  pg_hba_conf=/var/lib/pgsql/data/pg_hba.conf

  
@@ -112,22 +75,12 @@ 

  psql -U postgres -h 127.0.0.1 -c "DROP DATABASE IF EXISTS mbstest"

  psql -U postgres -h 127.0.0.1 -c "CREATE DATABASE mbstest"

  

- bashrc=/home/vagrant/.bashrc

- 

  echo "******** Run Tests with PostgreSQL ********"

  echo "Set this environment variable to test with PostgreSQL"

  echo "export DATABASE_URI=postgresql+psycopg2://postgres:@127.0.0.1/mbstest"

  echo

  PGSQL

  

- $script_services = <<SCRIPT_SERVICES

-     bin_dir=~/devenv/bin

-     cd /opt/module_build_service

-     $bin_dir/mbs-upgradedb > /tmp/mbs-base.out 2>&1

-     $bin_dir/fedmsg-relay < /dev/null >& /tmp/fedmsg-relay.out &

-     $bin_dir/fedmsg-hub < /dev/null >& /tmp/fedmsg-hub.out &

-     $bin_dir/mbs-frontend < /dev/null >& /tmp/mbs-frontend.out &

- SCRIPT_SERVICES

  

  Vagrant.configure("2") do |config|

    config.vm.box = "fedora/31-cloud-base"
@@ -135,12 +88,10 @@ 

    # Disable the default share

    config.vm.synced_folder ".", "/vagrant", disabled: true

    config.vm.network "forwarded_port", guest_ip: "0.0.0.0", guest: 5000, host: 5000

-   config.vm.network "forwarded_port", guest_ip: "0.0.0.0", guest: 13747, host: 13747

    config.vm.provision "shell", inline: $script

    config.vm.provision "shell", inline: "usermod -a -G mock vagrant"

    config.vm.provision "shell", inline: $config_pgsql

    config.vm.provision "shell", inline: $make_devenv, privileged: false

-   config.vm.provision "shell", inline: $script_services, privileged: false, run: "always"

    config.vm.provider "libvirt" do |v, override|

      override.vm.synced_folder "./", "/opt/module_build_service", type: "sshfs"

      v.memory = 1024
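
Note on the provisioning change above: instead of a hand-maintained dnf package list, the script now derives the runtime dependencies straight from the module-build-service spec file with rpmspec. A rough Python equivalent of that shell pipeline, for illustration only (assumes rpmspec is installed and uses the same /tmp path as the script):

    import subprocess

    def spec_requires(spec_path="/tmp/module-build-service.spec"):
        """Return package names from the Requires: lines of a parsed spec file."""
        parsed = subprocess.check_output(["rpmspec", "--parse", spec_path], text=True)
        # Mirrors `grep ^Requires: | tr -s ' ' | cut -d ' ' -f2`: keep the second
        # whitespace-separated field of every Requires: line.
        return [
            line.split()[1]
            for line in parsed.splitlines()
            if line.startswith("Requires:")
        ]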

file modified
+23 -35
@@ -19,26 +19,15 @@ 

      HOST = "0.0.0.0"

      PORT = 5000

  

-     # Global network-related values, in seconds

-     NET_TIMEOUT = 120

-     NET_RETRY_INTERVAL = 30

- 

-     SYSTEM = "koji"

-     MESSAGING = "fedmsg"  # or amq

      MESSAGING_TOPIC_PREFIX = ["org.fedoraproject.prod"]

      KOJI_CONFIG = "/etc/module-build-service/koji.conf"

      KOJI_PROFILE = "koji"

      ARCHES = ["i686", "armv7hl", "x86_64"]

-     ALLOW_ARCH_OVERRIDE = False

      KOJI_REPOSITORY_URL = "https://kojipkgs.fedoraproject.org/repos"

-     KOJI_TAG_PREFIXES = ["module", "scrmod"]

-     KOJI_ENABLE_CONTENT_GENERATOR = True

-     CHECK_FOR_EOL = False

      PDC_URL = "https://pdc.fedoraproject.org/rest_api/v1"

      PDC_INSECURE = False

      PDC_DEVELOP = True

      SCMURLS = ["https://src.fedoraproject.org/modules/"]

-     YAML_SUBMIT_ALLOWED = False

  

      # How often should we resort to polling, in seconds

      # Set to zero to disable polling
@@ -48,23 +37,10 @@ 

      # and be in the build state at a time. Set this to 0 for no restrictions

      NUM_CONCURRENT_BUILDS = 5

  

-     ALLOW_CUSTOM_SCMURLS = False

- 

      RPMS_DEFAULT_REPOSITORY = "https://src.fedoraproject.org/rpms/"

-     RPMS_ALLOW_REPOSITORY = False

      RPMS_DEFAULT_CACHE = "http://pkgs.fedoraproject.org/repo/pkgs/"

-     RPMS_ALLOW_CACHE = False

  

      MODULES_DEFAULT_REPOSITORY = "https://src.fedoraproject.org/modules/"

-     MODULES_ALLOW_REPOSITORY = False

-     MODULES_ALLOW_SCRATCH = False

- 

-     ALLOWED_GROUPS = {"packager"}

- 

-     ALLOWED_GROUPS_TO_IMPORT_MODULE = set()

- 

-     # Available backends are: console and file

-     LOG_BACKEND = "console"

  

      # Path to log file when LOG_BACKEND is set to "file".

      LOG_FILE = "module_build_service.log"
@@ -89,14 +65,26 @@ 

      AMQ_PRIVATE_KEY_FILE = "/etc/module_build_service/msg-m8y-client.key"

      AMQ_TRUSTED_CERT_FILE = "/etc/module_build_service/Root-CA.crt"

  

-     # Disable Client Authorization

-     NO_AUTH = False

+     # Configs for running tasks asynchronously with Celery

+     # For details of Celery configs, refer to Celery documentation:

+     # https://docs.celeryproject.org/en/latest/userguide/configuration.html

+     #

+     # Each config name consists of the namespace CELERY_ plus the Celery config

+     # name converted to upper case. For example, the Celery config name for the

+     # broker URL is broker_url, so as you can see below, the corresponding

+     # config name in MBS is CELERY_BROKER_URL.

+     CELERY_BROKER_URL = ""

+     CELERY_RESULT_BACKEND = ""

+     CELERY_IMPORTS = [

+         "module_build_service.scheduler.handlers.components",

+         "module_build_service.scheduler.handlers.modules",

+         "module_build_service.scheduler.handlers.repos",

+         "module_build_service.scheduler.handlers.tags",

+         "module_build_service.scheduler.handlers.greenwave",

+     ]

  

  

  class TestConfiguration(BaseConfiguration):

-     BUILD_LOGS_DIR = "/tmp"

-     BUILD_LOGS_NAME_FORMAT = "build-{id}.log"

-     LOG_BACKEND = "console"

      LOG_LEVEL = "debug"

      SQLALCHEMY_DATABASE_URI = environ.get(

          "DATABASE_URI", "sqlite:///{0}".format(path.join(dbdir, "mbstest.db")))
@@ -117,8 +105,6 @@ 

  

      KOJI_REPOSITORY_URL = "https://kojipkgs.stg.fedoraproject.org/repos"

      SCMURLS = ["https://src.stg.fedoraproject.org/modules/"]

-     AUTH_METHOD = "oidc"

-     RESOLVER = "db"

  

      ALLOWED_GROUPS_TO_IMPORT_MODULE = {"mbs-import-module"}

  
@@ -129,6 +115,9 @@ 

  

      STREAM_SUFFIXES = {r"^el\d+\.\d+\.\d+\.z$": 0.1}

  

+     # Ensures task.delay executes locally instead of scheduling a task to a queue.

+     CELERY_TASK_ALWAYS_EAGER = True

+ 

  

  class ProdConfiguration(BaseConfiguration):

      pass
@@ -139,9 +128,6 @@ 

      LOG_LEVEL = "debug"

      MESSAGING = "in_memory"

  

-     ARCH_AUTODETECT = True

-     ARCH_FALLBACK = "x86_64"

- 

      ALLOW_CUSTOM_SCMURLS = True

      RESOLVER = "mbs"

      RPMS_ALLOW_REPOSITORY = True
@@ -154,4 +140,6 @@ 

  

  class DevConfiguration(LocalBuildConfiguration):

      DEBUG = True

-     LOG_BACKEND = "console"

+ 

+     CELERY_BROKER_URL = "redis://localhost:6379/0"

+     CELERY_RESULT_BACKEND = "redis://localhost:6379/0"
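
The CELERY_* settings above follow the convention documented in the comment earlier in this file: drop the CELERY_ namespace and lowercase the remainder to get the native Celery setting name. A self-contained sketch of that mapping, reusing the DevConfiguration values from this diff:

    class DevConfiguration:
        CELERY_BROKER_URL = "redis://localhost:6379/0"
        CELERY_RESULT_BACKEND = "redis://localhost:6379/0"

    def to_celery_settings(config):
        # CELERY_BROKER_URL -> broker_url, CELERY_RESULT_BACKEND -> result_backend
        return {
            name[len("CELERY_"):].lower(): getattr(config, name)
            for name in dir(config)
            if name.startswith("CELERY_")
        }

    assert to_celery_settings(DevConfiguration) == {
        "broker_url": "redis://localhost:6379/0",
        "result_backend": "redis://localhost:6379/0",
    }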

file modified
+6 -1
@@ -12,6 +12,7 @@ 

  # --with-pgsql: run tests with PostgreSQL, otherwise SQLite is used.

  # --no-tty: don't use tty for containers

  # --sudo: run Docker via sudo

+ # --podman: use Podman instead of Docker

  # --no-pull: don't update Docker images

  #

  # Please note that both of them can have an arbitrary value as long as one of
@@ -22,6 +23,7 @@ 

  with_pgsql=

  no_tty=

  use_sudo=

+ use_podman=

  do_pull=1

  

  while (( "$#" )); do
@@ -30,6 +32,7 @@ 

          --with-pgsql) with_pgsql=1 ;;

          --no-tty) no_tty=1 ;;

          --sudo) use_sudo=1 ;;

+         --podman) use_podman=1 ;;

          --no-pull) do_pull= ;;

          *) break ;;

      esac
@@ -58,7 +61,9 @@ 

  if [ -n "$with_pgsql" ]; then

      test_container_name="${test_container_name}-pgsql"

  fi

- if [ -n "$use_sudo" ]; then

+ if [ -n "$use_podman" ]; then

+     docker="podman"

+ elif [ -n "$use_sudo" ]; then

      # use sudo for docker

      docker="sudo /usr/bin/docker"

  else

file modified
+2
@@ -42,6 +42,8 @@ 

      python-solv \

      python-sqlalchemy \

      python-tox \

+     python2-distro \

+     python2-celery \

      python2-libmodulemd2 \

      python2-pyyaml \

      python2-pungi \

@@ -9,6 +9,8 @@ 

      git-core \

      createrepo_c \

      rsync \

+     python3-distro \

+     python3-celery \

      python3-fedmsg \

      python3-kobo-rpmlib \

      python3-rpm \

file modified
+1 -1
@@ -22,4 +22,4 @@ 

  

  # Since tox seems to ignore `usedevelop` when we have `sitepackages` on, we have to run it manually

  python3 setup.py develop --no-deps

- /usr/bin/tox -e flake8,py3 "$@"

+ /usr/bin/tox -e flake8,py3,intflake "$@"

file modified
+12
@@ -1,6 +1,18 @@ 

  Change Log

  ==========

  

+ v2.30.4

+ -------

+ * Allowed component reuse in some cases when a component is added

+ 

+ v2.30.3

+ -------

+ * Fixed a local build bug caused by the refactoring of how database sessions are handled

+ 

+ v2.30.2

+ -------

+ * Fixed bugs that caused local builds to fail on Fedora 31

+ 

  v2.30.1

  -------

  * Fixed a bug that caused local builds to fail depending on the version of DNF being used

file modified
+1
@@ -31,6 +31,7 @@ 

  * ``--with-pgsql``: run tests with PostgreSQL database.

  * ``--no-tty``: don't use tty for containers

  * ``--sudo``: run Docker via sudo

+ * ``--podman``: use Podman instead of Docker

  * ``--no-pull``: don't update Docker images

  

  For example, ``contrib/run-unittests.sh --py3 --with-pgsql``.

@@ -19,10 +19,12 @@ 

  """

  

  import pkg_resources

+ from celery import Celery

  from flask import Flask, has_app_context, url_for

  from flask_sqlalchemy import SQLAlchemy

  from sqlalchemy.pool import StaticPool

  from logging import getLogger

+ 

  import gi  # noqa

  gi.require_version("Modulemd", "2.0")  # noqa

  from gi.repository import Modulemd  # noqa
@@ -46,6 +48,17 @@ 

  

  conf = init_config(app)

  

+ celery_app = Celery("module-build-service")

+ # Convert config names specific for Celery like this:

+ # celery_broker_url -> broker_url

+ celery_configs = {

+     name[7:]: getattr(conf, name)

+     for name in dir(conf) if name.startswith("celery_")

+ }

+ # Only allow a single process so that tasks are always serial per worker

+ celery_configs["worker_concurrency"] = 1

+ celery_app.conf.update(**celery_configs)

+ 

  

  class MBSSQLAlchemy(SQLAlchemy):

      """

@@ -1,6 +1,7 @@ 

  # -*- coding: utf-8 -*-

  # SPDX-License-Identifier: MIT

  import calendar

+ import distro

  import hashlib

  import logging

  import json
@@ -293,12 +294,12 @@ 

  

      def _get_buildroot(self):

          version = pkg_resources.get_distribution("module-build-service").version

-         distro = platform.linux_distribution()

+         distro_info = distro.linux_distribution()

          ret = {

              u"id": 1,

              u"host": {

                  u"arch": text_type(platform.machine()),

-                 u"os": u"%s %s" % (distro[0], distro[1]),

+                 u"os": u"%s %s" % (distro_info[0], distro_info[1]),

              },

              u"content_generator": {

                  u"name": u"module-build-service",

@@ -24,12 +24,12 @@ 

  import module_build_service.scm

  import module_build_service.utils

  from module_build_service.builder.utils import execute_cmd

- from module_build_service.builder.koji_backports import ClientSession as KojiClientSession

  from module_build_service.db_session import db_session

  from module_build_service.errors import ProgrammingError

  

- from module_build_service.builder.base import GenericBuilder

+ from module_build_service.builder import GenericBuilder

  from module_build_service.builder.KojiContentGenerator import KojiContentGenerator

+ from module_build_service.scheduler import events

  from module_build_service.utils import get_reusable_components, get_reusable_module, set_locale

  

  logging.basicConfig(level=logging.DEBUG)
@@ -487,7 +487,7 @@ 

  

          address = koji_config.server

          log.info("Connecting to koji %r.", address)

-         koji_session = KojiClientSession(address, opts=koji_config)

+         koji_session = koji.ClientSession(address, opts=koji_config)

  

          if not login:

              return koji_session
@@ -528,11 +528,13 @@ 

          # only if we are creating the build_tag for the first time.

          build_tag_exists = self.koji_session.getTag(self.tag_name + "-build")

  

+         tag_perm = self.config.koji_tag_permission

+ 

          # Create or update individual tags

          # the main tag needs arches so pungi can dump it

-         self.module_tag = self._koji_create_tag(self.tag_name, self.arches, perm="admin")

+         self.module_tag = self._koji_create_tag(self.tag_name, self.arches, perm=tag_perm)

          self.module_build_tag = self._koji_create_tag(

-             self.tag_name + "-build", self.arches, perm="admin")

+             self.tag_name + "-build", self.arches, perm=tag_perm)

  

          buildopts = self.mmd.get_buildopts()

          if buildopts and buildopts.get_rpm_whitelist():
@@ -698,6 +700,11 @@ 

          :param component_build: a ComponentBuild object

-         :return: a list of msgs that MBS needs to process

+         :return: True if an existing build was found and reused, False otherwise

          """

+         # Imported here because of circular dependencies.

+         from module_build_service.scheduler.handlers.tags import tagged as tagged_handler

+         from module_build_service.scheduler.handlers.components import (

+             build_task_finalize as build_task_finalize_handler)

+ 

          opts = {"latest": True, "package": component_build.package, "inherit": False}

          build_tagged = self.koji_session.listTagged(self.module_build_tag["name"], **opts)

          dest_tagged = None
@@ -725,10 +732,10 @@ 

                      nvr = "{name}-{version}-{release}".format(**untagged_build)

                      build = self.koji_session.getBuild(nvr)

                      break

-         further_work = []

-         # If the build doesn't exist, then return

+ 

+         # If the build doesn't exist, then return False

          if not build:

-             return further_work

+             return False

  

          # Start setting up MBS' database to use the existing build

          log.info('Skipping build of "{0}" since it already exists.'.format(build["nvr"]))
@@ -739,19 +746,11 @@ 

          component_build.state_reason = "Found existing build"

          nvr_dict = kobo.rpmlib.parse_nvr(component_build.nvr)

          # Trigger a completed build message

-         further_work.append(

-             module_build_service.messaging.KojiBuildChange(

-                 "recover_orphaned_artifact: fake message",

-                 build["build_id"],

-                 build["task_id"],

-                 koji.BUILD_STATES["COMPLETE"],

-                 component_build.package,

-                 nvr_dict["version"],

-                 nvr_dict["release"],

-                 component_build.module_build.id,

-             )

-         )

- 

+         args = (

+             "recover_orphaned_artifact: fake message", build["build_id"], build["task_id"],

+             koji.BUILD_STATES["COMPLETE"], component_build.package, nvr_dict["version"],

+             nvr_dict["release"], component_build.module_build.id, None)

+         events.scheduler.add(build_task_finalize_handler, args)

          component_tagged_in = []

          if build_tagged:

              component_tagged_in.append(self.module_build_tag["name"])
@@ -770,15 +769,11 @@ 

                  'The build being skipped isn\'t tagged in the "{0}" tag. Will send a message to '

                  "the tag handler".format(tag)

              )

-             further_work.append(

-                 module_build_service.messaging.KojiTagChange(

-                     "recover_orphaned_artifact: fake message",

-                     tag,

-                     component_build.package,

-                     component_build.nvr,

-                 )

-             )

-         return further_work

+             args = ("recover_orphaned_artifact: fake message", tag, component_build.package,

+                     component_build.nvr)

+             events.scheduler.add(tagged_handler, args)

+ 

+         return True

  

      def build(self, artifact_name, source):

          """
@@ -1336,6 +1331,9 @@ 

              the module build in the build system.

          :return: list of architectures

          """

+         if not module.koji_tag:

+             log.warning("No Koji tag associated with module %r", module)

+             return []

          koji_session = KojiModuleBuilder.get_session(conf, login=False)

          tag = koji_session.getTag(module.koji_tag)

          if not tag:
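
The recover_orphaned_artifact changes above no longer return fake message objects for the consumer to replay; they enqueue follow-up handler calls with events.scheduler.add(handler, args). The module_build_service.scheduler.events implementation is not part of this diff, so the following is only a sketch of the interface those call sites assume:

    from collections import deque

    class EventScheduler:
        """Hypothetical reconstruction: an in-process FIFO of queued handler calls."""

        def __init__(self):
            self._queue = deque()

        def add(self, handler, args):
            # Remember the handler and its positional arguments; nothing runs yet.
            self._queue.append((handler, args))

        def run(self):
            # Drain the queue, invoking handlers in the order they were added.
            while self._queue:
                handler, args = self._queue.popleft()
                handler(*args)

    scheduler = EventScheduler()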

@@ -14,9 +14,8 @@ 

  import module_build_service.scm

  import module_build_service.utils

  import module_build_service.scheduler

- import module_build_service.scheduler.consumer

  

- from module_build_service.builder.base import GenericBuilder

+ from module_build_service.builder import GenericBuilder

  from module_build_service.builder.utils import (

      create_local_repo_from_koji_tag,

      execute_cmd,
@@ -26,6 +25,7 @@ 

  from module_build_service.utils.general import mmd_to_str

  from module_build_service.db_session import db_session

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

+ from module_build_service.scheduler import events

  

  from module_build_service import models

  
@@ -109,7 +109,13 @@ 

              os.makedirs(self.configdir)

  

          # Generate path to mock config and add local repository there.

-         self._add_repo("localrepo", "file://" + self.resultsdir, "metadata_expire=1\n")

+         # Set skip_if_unavailable=True since the repo isn't available until after

+         # module-build-macros is built.

+         self._add_repo(

+             "localrepo",

+             "file://" + self.resultsdir,

+             "metadata_expire=1\nskip_if_unavailable=True\n",

+         )

  

          # Remove old files from the previous build of this tag but only

          # before the first build is done, otherwise we would remove files
@@ -311,6 +317,7 @@ 

          pass

  

      def buildroot_add_artifacts(self, artifacts, install=False):

+         from module_build_service.scheduler.handlers.repos import done as repos_done_handler

          self._createrepo()

  

          # TODO: This is just hack to install module-build-macros into the
@@ -323,9 +330,7 @@ 

                  self.groups.append("module-build-macros")

                  self._write_mock_config()

  

-         from module_build_service.scheduler.consumer import fake_repo_done_message

- 

-         fake_repo_done_message(self.tag_name)

+         events.scheduler.add(repos_done_handler, ("fake_msg", self.tag_name + "-build"))

  

      def tag_artifacts(self, artifacts):

          pass
@@ -367,22 +372,28 @@ 

                  repo_name = tag = source

                  koji_config = get_koji_config(self.config)

                  koji_session = koji.ClientSession(koji_config.server, opts=koji_config)

+                 # Check to see if there are any external repos tied to the tag

+                 for ext_repo in koji_session.getTagExternalRepos(tag):

+                     self._add_repo(ext_repo["external_repo_name"], ext_repo["url"])

+ 

                  repo = koji_session.getRepo(repo_name)

                  if repo:

                      baseurl = koji.PathInfo(topdir=koji_config.topurl).repo(repo["id"], repo_name)

                      baseurl = "{0}/{1}/".format(baseurl, self.arch)

                  else:

                      repo_dir = os.path.join(self.config.cache_dir, "koji_tags", tag)

-                     create_local_repo_from_koji_tag(

+                     should_add_repo = create_local_repo_from_koji_tag(

                          self.config, tag, repo_dir, [self.arch, "noarch"])

+                     if not should_add_repo:

+                         continue

                      baseurl = "file://" + repo_dir

-                 # Check to see if there are any external repos tied to the tag

-                 for ext_repo in koji_session.getTagExternalRepos(repo_name):

-                     self._add_repo(ext_repo["external_repo_name"], ext_repo["url"])

+ 

              self._add_repo(repo_name, baseurl)

          self._write_mock_config()

  

      def _send_build_change(self, state, source, build_id):

+         from module_build_service.scheduler.handlers.components import (

+             build_task_finalize as build_task_finalize_handler)

          try:

              nvr = kobo.rpmlib.parse_nvr(source)

          except ValueError:
@@ -390,16 +401,10 @@ 

  

          # build_id=1 and task_id=1 are OK here, because we are building just

          # one RPM at the time.

-         msg = module_build_service.messaging.KojiBuildChange(

-             msg_id="a faked internal message",

-             build_id=build_id,

-             task_id=build_id,

-             build_name=nvr["name"],

-             build_new_state=state,

-             build_release=nvr["release"],

-             build_version=nvr["version"],

-         )

-         module_build_service.scheduler.consumer.work_queue_put(msg)

+         args = (

+             "a faked internal message", build_id, build_id, state, nvr["name"], nvr["version"],

+             nvr["release"], None, None)

+         events.scheduler.add(build_task_finalize_handler, args)

  

      def _save_log(self, resultsdir, log_name, artifact_name):

          old_log = os.path.join(resultsdir, log_name)

@@ -1,100 +0,0 @@ 

- # -*- coding: utf-8 -*-

- # SPDX-License-Identifier: MIT

- # flake8: noqa

- import base64

- import traceback

- 

- import koji

- # Import krbV from here so we don't have to redo the whole try except that Koji does

- from koji import krbV, PythonImportError, AuthError, AUTHTYPE_KERB

- 

- 

- class ClientSession(koji.ClientSession):

-     """The koji.ClientSession class with patches from upstream."""

- 

-     # This backport comes from https://pagure.io/koji/pull-request/1187

-     def krb_login(self, principal=None, keytab=None, ccache=None, proxyuser=None, ctx=None):

-         """Log in using Kerberos.  If principal is not None and keytab is

-         not None, then get credentials for the given principal from the given keytab.

-         If both are None, authenticate using existing local credentials (as obtained

-         from kinit).  ccache is the absolute path to use for the credential cache. If

-         not specified, the default ccache will be used.  If proxyuser is specified,

-         log in the given user instead of the user associated with the Kerberos

-         principal.  The principal must be in the "ProxyPrincipals" list on

-         the server side.  ctx is the Kerberos context to use, and should be unique

-         per thread.  If ctx is not specified, the default context is used."""

- 

-         try:

-             # Silently try GSSAPI first

-             if self.gssapi_login(principal, keytab, ccache, proxyuser=proxyuser):

-                 return True

-         except Exception as e:

-             if krbV:

-                 e_str = ''.join(traceback.format_exception_only(type(e), e))

-                 self.logger.debug('gssapi auth failed: %s', e_str)

-                 pass

-             else:

-                 raise

- 

-         if not krbV:

-             raise PythonImportError(

-                 "Please install python-krbV to use kerberos."

-             )

- 

-         if not ctx:

-             ctx = krbV.default_context()

- 

-         if ccache != None:

-             ccache = krbV.CCache(name=ccache, context=ctx)

-         else:

-             ccache = ctx.default_ccache()

- 

-         if principal != None:

-             if keytab != None:

-                 cprinc = krbV.Principal(name=principal, context=ctx)

-                 keytab = krbV.Keytab(name=keytab, context=ctx)

-                 ccache.init(cprinc)

-                 ccache.init_creds_keytab(principal=cprinc, keytab=keytab)

-             else:

-                 raise AuthError('cannot specify a principal without a keytab')

-         else:

-             # We're trying to log ourself in.  Connect using existing credentials.

-             cprinc = ccache.principal()

- 

-         self.logger.debug('Authenticating as: %s', cprinc.name)

-         sprinc = krbV.Principal(name=self._serverPrincipal(cprinc), context=ctx)

- 

-         ac = krbV.AuthContext(context=ctx)

-         ac.flags = krbV.KRB5_AUTH_CONTEXT_DO_SEQUENCE | krbV.KRB5_AUTH_CONTEXT_DO_TIME

-         ac.rcache = ctx.default_rcache()

- 

-         # create and encode the authentication request

-         (ac, req) = ctx.mk_req(server=sprinc, client=cprinc,

-                                auth_context=ac, ccache=ccache,

-                                options=krbV.AP_OPTS_MUTUAL_REQUIRED)

-         req_enc = base64.encodestring(req)

- 

-         # ask the server to authenticate us

-         (rep_enc, sinfo_enc, addrinfo) = self.callMethod('krbLogin', req_enc, proxyuser)

-         # Set the addrinfo we received from the server

-         # (necessary before calling rd_priv())

-         # addrinfo is in (serveraddr, serverport, clientaddr, clientport)

-         # format, so swap the pairs because clientaddr is now the local addr

-         ac.addrs = tuple((addrinfo[2], addrinfo[3], addrinfo[0], addrinfo[1]))

- 

-         # decode and read the reply from the server

-         rep = base64.decodestring(rep_enc)

-         ctx.rd_rep(rep, auth_context=ac)

- 

-         # decode and decrypt the login info

-         sinfo_priv = base64.decodestring(sinfo_enc)

-         sinfo_str = ac.rd_priv(sinfo_priv)

-         sinfo = dict(zip(['session-id', 'session-key'], sinfo_str.split()))

- 

-         if not sinfo:

-             self.logger.warn('No session info received')

-             return False

-         self.setSession(sinfo)

- 

-         self.authtype = AUTHTYPE_KERB

-         return True

@@ -72,6 +72,8 @@ 

      Downloads the packages build for one of `archs` (defaults to ['x86_64',

      'noarch']) in Koji tag `tag` to `repo_dir` and creates repository in that

      directory. Needs config.koji_profile and config.koji_config to be set.

+ 

+     If there are no builds associated with the tag, False is returned.

      """

  

      # Placed here to avoid py2/py3 conflicts...
@@ -92,6 +94,10 @@ 

      except koji.GenericError:

          log.exception("Failed to list rpms in tag %r" % tag)

  

+     if not builds:

+         log.debug("No builds are associated with the tag %r", tag)

+         return False

+ 

      # Reformat builds so they are dict with build_id as a key.

      builds = {build["build_id"]: build for build in builds}

  
@@ -162,3 +168,5 @@ 

  

          log.info("Creating local repository in %s" % repo_dir)

          execute_cmd(["/usr/bin/createrepo_c", repo_dir])

+ 

+     return True

@@ -164,6 +164,11 @@ 

              "default": ["module", "scrmod"],

              "desc": "List of allowed koji tag prefixes.",

          },

+         "koji_tag_permission": {

+             "type": str,

+             "default": "admin",

+             "desc": "Permission name to require for newly created Koji tags.",

+         },

          "koji_tag_extra_opts": {

              "type": dict,

              "default": {
@@ -244,7 +249,7 @@ 

          "log_level": {"type": str, "default": 0, "desc": "Log level"},

          "build_logs_dir": {

              "type": Path,

-             "default": "",

+             "default": tempfile.gettempdir(),

              "desc": "Directory to store module build logs to.",

          },

          "build_logs_name_format": {
@@ -656,6 +661,13 @@ 

              "desc": "The minrate configuration on a DNF repo. This configuration will cause DNF to "

                      "timeout loading a repo if the download speed is below minrate for the "

                      "duration of the timeout."

+         },

+         "celery_worker_prefetch_multiplier": {

+             "type": int,

+             "default": 1,

+             "desc": "This defaults to 1 so that the worker doesn't fetch more messages than it can "

+                     "handle at a time. This so that general tasks aren't starved when running "

+                     "a long handler.",

          }

      }

  
@@ -751,7 +763,7 @@ 

  

      def _setifok_log_backend(self, s):

          if s is None:

-             self._log_backend = "console"

+             s = "console"

          elif s not in logger.supported_log_backends():

              raise ValueError("Unsupported log backend")

          self._log_backend = str(s)
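
The new koji_tag_permission option above replaces the perm="admin" hard-coding removed in the KojiModuleBuilder hunk. A hypothetical override in a deployment config class (the class and permission name are examples, not a recommendation):

    class MyConfiguration(ProdConfiguration):  # hypothetical subclass of the config shown earlier
        KOJI_TAG_PERMISSION = "tag-module"  # any existing Koji permission; the default remains "admin"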

@@ -44,3 +44,7 @@ 

      response = jsonify({"status": status, "error": error, "message": message})

      response.status_code = status

      return response

+ 

+ 

+ class IgnoreMessage(Exception):

+     """Raise if message received from message bus should be ignored"""

@@ -23,6 +23,7 @@ 

  from module_build_service.errors import StreamAmbigous

  import module_build_service.messaging

  import module_build_service.scheduler.consumer

+ import module_build_service.scheduler.local

  

  

  manager = Manager(create_app)
@@ -144,6 +145,9 @@ 

          os.remove(dbpath)

  

      db.create_all()

+     # Reconfigure the backend database session registry to use the new database location

+     db_session.remove()

+     db_session.configure(bind=db.session.bind)

  

      params = {

          "local_build": True,
@@ -158,8 +162,6 @@ 

  

      yaml_file_path = os.path.abspath(yaml_file)

  

-     from module_build_service.db_session import db_session

- 

      if offline:

          import_builds_from_local_dnf_repos(platform_id)

      load_local_builds(local_build_nsvs)
@@ -180,10 +182,7 @@ 

  

          module_build_ids = [build.id for build in module_builds]

  

-     stop = module_build_service.scheduler.make_simple_stop_condition()

- 

-     # Run the consumer until stop_condition returns True

-     module_build_service.scheduler.main([], stop)

+     module_build_service.scheduler.local.main(module_build_ids)

  

      has_failed_module = db_session.query(models.ModuleBuild).filter(

          models.ModuleBuild.id.in_(module_build_ids),

@@ -2,280 +2,11 @@ 

  # SPDX-License-Identifier: MIT

  """Generic messaging functions."""

  

- import re

  import pkg_resources

  

- try:

-     from inspect import signature

- except ImportError:

-     from funcsigs import signature

+ from module_build_service.scheduler.parser import FedmsgMessageParser

  

- from module_build_service import log

- 

- 

- class IgnoreMessage(Exception):

-     pass

- 

- 

- class BaseMessage(object):

-     def __init__(self, msg_id):

-         """

-         A base class to abstract messages from different backends

-         :param msg_id: the id of the msg (e.g. 2016-SomeGUID)

-         """

-         self.msg_id = msg_id

- 

-         # Moksha calls `consumer.validate` on messages that it receives, and

-         # even though we have validation turned off in the config there's still

-         # a step that tries to access `msg['body']`, `msg['topic']` and

-         # `msg.get('topic')`.

-         # These are here just so that the `validate` method won't raise an

-         # exception when we push our fake messages through.

-         # Note that, our fake message pushing has worked for a while... but the

-         # *latest* version of fedmsg has some code that exercises the bug.  I

-         # didn't hit this until I went to test in jenkins.

-         self.body = {}

-         self.topic = None

- 

-     def __repr__(self):

-         init_sig = signature(self.__init__)

- 

-         args_strs = (

-             "{}={!r}".format(name, getattr(self, name))

-             if param.default != param.empty

-             else repr(getattr(self, name))

-             for name, param in init_sig.parameters.items()

-         )

- 

-         return "{}({})".format(type(self).__name__, ", ".join(args_strs))

- 

-     def __getitem__(self, key):

-         """ Used to trick moksha into thinking we are a dict. """

-         return getattr(self, key)

- 

-     def __setitem__(self, key, value):

-         """ Used to trick moksha into thinking we are a dict. """

-         return setattr(self, key, value)

- 

-     def get(self, key, value=None):

-         """ Used to trick moksha into thinking we are a dict. """

-         return getattr(self, key, value)

- 

-     def __json__(self):

-         return dict(msg_id=self.msg_id, topic=self.topic, body=self.body)

- 

- 

- class MessageParser(object):

-     def parse(self, msg):

-         raise NotImplementedError()

- 

- 

- class FedmsgMessageParser(MessageParser):

-     def parse(self, msg):

-         """

-         Takes a fedmsg topic and message and converts it to a message object

-         :param msg: the message contents from the fedmsg message

-         :return: an object of BaseMessage descent if the message is a type

-         that the app looks for, otherwise None is returned

-         """

-         if "body" in msg:

-             msg = msg["body"]

-         topic = msg["topic"]

-         topic_categories = _messaging_backends["fedmsg"]["services"]

-         categories_re = "|".join(map(re.escape, topic_categories))

-         regex_pattern = re.compile(

-             r"(?P<category>" + categories_re + r")"

-             r"(?:(?:\.)(?P<object>build|repo|module|decision))?"

-             r"(?:(?:\.)(?P<subobject>state|build))?"

-             r"(?:\.)(?P<event>change|done|end|tag|update)$"

-         )

-         regex_results = re.search(regex_pattern, topic)

- 

-         if regex_results:

-             category = regex_results.group("category")

-             object = regex_results.group("object")

-             subobject = regex_results.group("subobject")

-             event = regex_results.group("event")

- 

-             msg_id = msg.get("msg_id")

-             msg_inner_msg = msg.get("msg")

- 

-             # If there isn't a msg dict in msg then this message can be skipped

-             if not msg_inner_msg:

-                 log.debug(

-                     "Skipping message without any content with the " 'topic "{0}"'.format(topic))

-                 return None

- 

-             msg_obj = None

- 

-             # Ignore all messages from the secondary koji instances.

-             if category == "buildsys":

-                 instance = msg_inner_msg.get("instance", "primary")

-                 if instance != "primary":

-                     log.debug("Ignoring message from %r koji hub." % instance)

-                     return

- 

-             if (

-                 category == "buildsys"

-                 and object == "build"

-                 and subobject == "state"

-                 and event == "change"

-             ):

-                 build_id = msg_inner_msg.get("build_id")

-                 task_id = msg_inner_msg.get("task_id")

-                 build_new_state = msg_inner_msg.get("new")

-                 build_name = msg_inner_msg.get("name")

-                 build_version = msg_inner_msg.get("version")

-                 build_release = msg_inner_msg.get("release")

- 

-                 msg_obj = KojiBuildChange(

-                     msg_id,

-                     build_id,

-                     task_id,

-                     build_new_state,

-                     build_name,

-                     build_version,

-                     build_release,

-                 )

- 

-             elif (

-                 category == "buildsys"

-                 and object == "repo"

-                 and subobject is None

-                 and event == "done"

-             ):

-                 repo_tag = msg_inner_msg.get("tag")

-                 msg_obj = KojiRepoChange(msg_id, repo_tag)

- 

-             elif category == "buildsys" and event == "tag":

-                 tag = msg_inner_msg.get("tag")

-                 name = msg_inner_msg.get("name")

-                 version = msg_inner_msg.get("version")

-                 release = msg_inner_msg.get("release")

-                 nvr = None

-                 if name and version and release:

-                     nvr = "-".join((name, version, release))

-                 msg_obj = KojiTagChange(msg_id, tag, name, nvr)

- 

-             elif (

-                 category == "mbs"

-                 and object == "module"

-                 and subobject == "state"

-                 and event == "change"

-             ):

-                 msg_obj = MBSModule(msg_id, msg_inner_msg.get("id"), msg_inner_msg.get("state"))

- 

-             elif (

-                 category == "greenwave"

-                 and object == "decision"

-                 and subobject is None

-                 and event == "update"

-             ):

-                 msg_obj = GreenwaveDecisionUpdate(

-                     msg_id=msg_id,

-                     decision_context=msg_inner_msg.get("decision_context"),

-                     policies_satisfied=msg_inner_msg.get("policies_satisfied"),

-                     subject_identifier=msg_inner_msg.get("subject_identifier"),

-                 )

- 

-             # If the message matched the regex and is important to the app,

-             # it will be returned

-             if msg_obj:

-                 return msg_obj

- 

-         return None

- 

- 

- class KojiBuildChange(BaseMessage):

-     """ A class that inherits from BaseMessage to provide a message

-     object for a build's info (in fedmsg this replaces the msg dictionary)

-     :param msg_id: the id of the msg (e.g. 2016-SomeGUID)

-     :param build_id: the id of the build (e.g. 264382)

-     :param build_new_state: the new build state, this is currently a Koji

-     integer

-     :param build_name: the name of what is being built

-     (e.g. golang-googlecode-tools)

-     :param build_version: the version of the build (e.g. 6.06.06)

-     :param build_release: the release of the build (e.g. 4.fc25)

-     :param module_build_id: the optional id of the module_build in the database

-     :param state_reason: the optional reason as to why the state changed

-     """

- 

-     def __init__(

-         self,

-         msg_id,

-         build_id,

-         task_id,

-         build_new_state,

-         build_name,

-         build_version,

-         build_release,

-         module_build_id=None,

-         state_reason=None,

-     ):

-         if task_id is None:

-             raise IgnoreMessage("KojiBuildChange with a null task_id is invalid.")

-         super(KojiBuildChange, self).__init__(msg_id)

-         self.build_id = build_id

-         self.task_id = task_id

-         self.build_new_state = build_new_state

-         self.build_name = build_name

-         self.build_version = build_version

-         self.build_release = build_release

-         self.module_build_id = module_build_id

-         self.state_reason = state_reason

- 

- 

- class KojiTagChange(BaseMessage):

-     """

-     A class that inherits from BaseMessage to provide a message

-     object for a buildsys.tag info (in fedmsg this replaces the msg dictionary)

-     :param tag: the name of tag (e.g. module-123456789-build)

-     :param artifact: the name of tagged artifact (e.g. module-build-macros)

-     :param nvr: the nvr of the tagged artifact

-     """

- 

-     def __init__(self, msg_id, tag, artifact, nvr):

-         super(KojiTagChange, self).__init__(msg_id)

-         self.tag = tag

-         self.artifact = artifact

-         self.nvr = nvr

- 

- 

- class KojiRepoChange(BaseMessage):

-     """ A class that inherits from BaseMessage to provide a message

-     object for a repo's info (in fedmsg this replaces the msg dictionary)

-     :param msg_id: the id of the msg (e.g. 2016-SomeGUID)

-     :param repo_tag: the repo's tag (e.g. SHADOWBUILD-f25-build)

-     """

- 

-     def __init__(self, msg_id, repo_tag):

-         super(KojiRepoChange, self).__init__(msg_id)

-         self.repo_tag = repo_tag

- 

- 

- class MBSModule(BaseMessage):

-     """ A class that inherits from BaseMessage to provide a message

-     object for a module event generated by module_build_service

-     :param msg_id: the id of the msg (e.g. 2016-SomeGUID)

-     :param module_build_id: the id of the module build

-     :param module_build_state: the state of the module build

-     """

- 

-     def __init__(self, msg_id, module_build_id, module_build_state):

-         super(MBSModule, self).__init__(msg_id)

-         self.module_build_id = module_build_id

-         self.module_build_state = module_build_state

- 

- 

- class GreenwaveDecisionUpdate(BaseMessage):

-     """A class representing message send to topic greenwave.decision.update"""

- 

-     def __init__(self, msg_id, decision_context, policies_satisfied, subject_identifier):

-         super(GreenwaveDecisionUpdate, self).__init__(msg_id)

-         self.decision_context = decision_context

-         self.policies_satisfied = policies_satisfied

-         self.subject_identifier = subject_identifier

+ from module_build_service import conf, log

  

  

  def publish(topic, msg, conf, service):
@@ -331,7 +62,7 @@ 

      # Create fake fedmsg from the message so we can reuse

      # the BaseMessage.from_fedmsg code to get the particular BaseMessage

      # class instance.

-     wrapped_msg = FedmsgMessageParser().parse({

+     wrapped_msg = FedmsgMessageParser(known_fedmsg_services).parse({

          "msg_id": str(_in_memory_msg_id),

          "topic": service + "." + topic,

          "msg": msg
@@ -352,16 +83,19 @@ 

          _initial_messages.append(wrapped_msg)

  

  

+ known_fedmsg_services = ["buildsys", "mbs", "greenwave"]

+ 

+ 

  _fedmsg_backend = {

      "publish": _fedmsg_publish,

-     "services": ["buildsys", "mbs", "greenwave"],

-     "parser": FedmsgMessageParser(),

+     "parser": FedmsgMessageParser(known_fedmsg_services),

+     "services": known_fedmsg_services,

      "topic_suffix": ".",

  }

  _in_memory_backend = {

      "publish": _in_memory_publish,

+     "parser": FedmsgMessageParser(known_fedmsg_services),  # re-used.  :)

      "services": [],

-     "parser": FedmsgMessageParser(),  # re-used.  :)

      "topic_suffix": ".",

  }

  
@@ -375,3 +109,7 @@ 

  

  if not _messaging_backends:

      raise ValueError("No messaging plugins are installed or available.")

+ 

+ # After loading the registered messaging backends, the default messaging backend

+ # can be determined from the configured messaging backend name.

+ default_messaging_backend = _messaging_backends[conf.messaging]
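
With the message abstraction classes and FedmsgMessageParser moved out to module_build_service.scheduler.parser, what remains here is backend registration plus dispatch through default_messaging_backend. A stubbed sketch of the dispatch pattern (the stub backend and conf are placeholders, not MBS code):

    def _stub_publish(topic, msg, conf, service):
        print("publishing to", service + "." + topic, msg)

    _messaging_backends = {"fedmsg": {"publish": _stub_publish, "topic_suffix": "."}}

    class _StubConf:
        messaging = "fedmsg"

    default_messaging_backend = _messaging_backends[_StubConf.messaging]

    def publish(topic, msg, conf, service):
        # Route the call through whichever backend the configuration selected.
        return default_messaging_backend["publish"](topic, msg, conf, service)

    publish("module.state.change", {"state": "ready"}, _StubConf, "mbs")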

file modified
+13 -44
@@ -19,6 +19,7 @@ 

  import module_build_service.messaging

  from module_build_service import db, log, get_url_for, conf

  from module_build_service.errors import UnprocessableEntity

+ from module_build_service.scheduler import events

  

  DEFAULT_MODULE_CONTEXT = "00000000"

  
@@ -473,7 +474,7 @@ 

          raise ValueError("%s: %s, not in %r" % (key, field, BUILD_STATES))

  

      @validates("rebuild_strategy")

-     def validate_rebuild_stategy(self, key, rebuild_strategy):

+     def validate_rebuild_strategy(self, key, rebuild_strategy):

          if rebuild_strategy not in self.rebuild_strategies.keys():

              choices = ", ".join(self.rebuild_strategies.keys())

              raise ValueError(
@@ -484,7 +485,7 @@ 

  

      @classmethod

      def from_module_event(cls, db_session, event):

-         if type(event) == module_build_service.messaging.MBSModule:

+         if type(event) == events.MBSModule:

              return db_session.query(cls).filter(cls.id == event.module_build_id).first()

          else:

              raise ValueError("%r is not a module message." % type(event).__name__)
@@ -746,41 +747,15 @@ 

          return db_session.query(ModuleBuild).filter_by(state=BUILD_STATES[state]).all()

  

      @classmethod

-     def from_repo_done_event(cls, db_session, event):

-         """ Find the ModuleBuilds in our database that should be in-flight...

-         ... for a given koji tag.

- 

-         There should be at most one.

-         """

-         if event.repo_tag.endswith("-build"):

-             tag = event.repo_tag[:-6]

-         else:

-             tag = event.repo_tag

-         query = (

-             db_session.query(cls)

-             .filter(cls.koji_tag == tag)

-             .filter(cls.state == BUILD_STATES["build"])

-         )

- 

-         count = query.count()

-         if count > 1:

-             raise RuntimeError("%r module builds in flight for %r" % (count, tag))

- 

-         return query.first()

- 

-     @classmethod

-     def from_tag_change_event(cls, db_session, event):

-         tag = event.tag[:-6] if event.tag.endswith("-build") else event.tag

-         query = (

-             db_session.query(cls)

-             .filter(cls.koji_tag == tag)

-             .filter(cls.state == BUILD_STATES["build"])

+     def get_by_tag(cls, db_session, tag_name):

+         tag = tag_name[:-6] if tag_name.endswith("-build") else tag_name

+         query = db_session.query(cls).filter(

+             cls.koji_tag == tag,

+             cls.state == BUILD_STATES["build"]

          )

- 

          count = query.count()

          if count > 1:

              raise RuntimeError("%r module builds in flight for %r" % (count, tag))

- 

          return query.first()

  

      def short_json(self, show_stream_version=False, show_scratch=True):
@@ -1127,18 +1102,12 @@ 

      weight = db.Column(db.Float, default=0)

  

      @classmethod

-     def from_component_event(cls, db_session, event):

-         if isinstance(event, module_build_service.messaging.KojiBuildChange):

-             if event.module_build_id:

-                 return (

-                     db_session.query(cls)

-                     .filter_by(task_id=event.task_id, module_id=event.module_build_id)

-                     .one()

-                 )

-             else:

-                 return db_session.query(cls).filter(cls.task_id == event.task_id).first()

+     def from_component_event(cls, db_session, task_id, module_id=None):

+         _filter = db_session.query(cls).filter

+         if module_id is None:

+             return _filter(cls.task_id == task_id).first()

          else:

-             raise ValueError("%r is not a koji message." % event["topic"])

+             return _filter(cls.task_id == task_id, cls.module_id == module_id).one()

  

      @classmethod

      def from_component_name(cls, db_session, component_name, module_id):
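
get_by_tag consolidates the two near-duplicate lookups it replaces (from_repo_done_event and from_tag_change_event) into one query keyed on a normalized tag name. The suffix normalization in isolation (the tag names below are made up):

    def normalize_tag(tag_name):
        # A Koji build tag "X-build" and its base tag "X" resolve to the same module build.
        return tag_name[:-6] if tag_name.endswith("-build") else tag_name

    assert normalize_tag("module-testmodule-master-20200101-build") == "module-testmodule-master-20200101"
    assert normalize_tag("module-testmodule-master-20200101") == "module-testmodule-master-20200101"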

@@ -1,87 +1,3 @@ 

  # -*- coding: utf-8 -*-

  # SPDX-License-Identifier: MIT

  """ This is a sub-module for backend/scheduler functionality. """

- 

- import fedmsg

- import moksha.hub

- 

- import module_build_service.models

- import module_build_service.scheduler.consumer

- 

- from module_build_service.db_session import db_session

- 

- import logging

- 

- log = logging.getLogger(__name__)

- 

- 

- def main(initial_messages, stop_condition):

-     """ Run the consumer until some condition is met.

- 

-     Setting stop_condition to None will run the consumer forever.

-     """

- 

-     config = fedmsg.config.load_config()

-     config["mbsconsumer"] = True

-     config["mbsconsumer.stop_condition"] = stop_condition

-     config["mbsconsumer.initial_messages"] = initial_messages

- 

-     # Moksha requires that we subscribe to *something*, so tell it /dev/null

-     # since we'll just be doing in-memory queue-based messaging for this single

-     # build.

-     config["zmq_enabled"] = True

-     config["zmq_subscribe_endpoints"] = "ipc:///dev/null"

- 

-     consumers = [module_build_service.scheduler.consumer.MBSConsumer]

- 

-     # Note that the hub we kick off here cannot send any message.  You

-     # should use fedmsg.publish(...) still for that.

-     moksha.hub.main(

-         # Pass in our config dict

-         options=config,

-         # Only run the specified consumers if any are so specified.

-         consumers=consumers,

-         # Do not run default producers.

-         producers=[],

-         # Tell moksha to quiet its logging.

-         framework=False,

-     )

- 

- 

- def make_simple_stop_condition():

-     """ Return a simple stop_condition callable.

- 

-     Intended to be used with the main() function here in manage.py and tests.

- 

-     The stop_condition returns true when the latest module build enters the any

-     of the finished states.

-     """

- 

-     def stop_condition(message):

-         # XXX - We ignore the message here and instead just query the DB.

- 

-         # Grab the latest module build.

-         module = (

-             db_session.query(module_build_service.models.ModuleBuild)

-             .order_by(module_build_service.models.ModuleBuild.id.desc())

-             .first()

-         )

-         done = (

-             module_build_service.models.BUILD_STATES["failed"],

-             module_build_service.models.BUILD_STATES["ready"],

-             module_build_service.models.BUILD_STATES["done"],

-         )

-         result = module.state in done

-         log.debug("stop_condition checking %r, got %r" % (module, result))

- 

-         # moksha.hub.main starts the hub and runs it in a separate thread. When

-         # the result is True, remove the db_session from that thread local so

-         # that any pending queries in the transaction will not block other

-         # queries made from other threads.

-         # This is useful for testing particularly.

-         if result:

-             db_session.remove()

- 

-         return result

- 

-     return stop_condition

@@ -5,7 +5,6 @@ 

  to use.

  """

  

- import inspect

  import itertools

  

  try:
@@ -18,21 +17,50 @@ 

  import koji

  import fedmsg.consumers

  import moksha.hub

- import six

  import sqlalchemy.exc

  

  import module_build_service.messaging

- import module_build_service.scheduler.handlers.repos

- import module_build_service.scheduler.handlers.components

- import module_build_service.scheduler.handlers.modules

- import module_build_service.scheduler.handlers.tags

- import module_build_service.scheduler.handlers.greenwave

  import module_build_service.monitor as monitor

  

  from module_build_service import models, log, conf

  from module_build_service.db_session import db_session

+ from module_build_service.errors import IgnoreMessage

+ from module_build_service.messaging import default_messaging_backend

+ from module_build_service.scheduler import events

+ from module_build_service.scheduler.handlers import components

+ from module_build_service.scheduler.handlers import repos

+ from module_build_service.scheduler.handlers import modules

+ from module_build_service.scheduler.handlers import tags

  from module_build_service.scheduler.handlers import greenwave

- from module_build_service.utils import module_build_state_from_msg

+ 

+ 

+ def no_op_handler(*args, **kwargs):

+     return True

+ 

+ 

+ ON_BUILD_CHANGE_HANDLERS = {

+     koji.BUILD_STATES["BUILDING"]: no_op_handler,

+     koji.BUILD_STATES["COMPLETE"]: components.build_task_finalize,

+     koji.BUILD_STATES["FAILED"]: components.build_task_finalize,

+     koji.BUILD_STATES["CANCELED"]: components.build_task_finalize,

+     koji.BUILD_STATES["DELETED"]: no_op_handler,

+ }

+ 

+ ON_MODULE_CHANGE_HANDLERS = {

+     models.BUILD_STATES["init"]: modules.init,

+     models.BUILD_STATES["wait"]: modules.wait,

+     models.BUILD_STATES["build"]: no_op_handler,

+     models.BUILD_STATES["failed"]: modules.failed,

+     models.BUILD_STATES["done"]: modules.done,

+     # XXX: DIRECT TRANSITION TO READY

+     models.BUILD_STATES["ready"]: no_op_handler,

+     models.BUILD_STATES["garbage"]: no_op_handler,

+ }

+ 

+ # Only one kind of repo change event, though...

+ ON_REPO_CHANGE_HANDLER = repos.done

+ ON_TAG_CHANGE_HANDLER = tags.tagged

+ ON_DECISION_UPDATE_HANDLER = greenwave.decision_update

  

  

  class MBSConsumer(fedmsg.consumers.FedmsgConsumer):
@@ -50,10 +78,9 @@ 

      def __init__(self, hub):

          # Topic setting needs to be done *before* the call to `super`.

  

-         backends = module_build_service.messaging._messaging_backends

          prefixes = conf.messaging_topic_prefix  # This is a list.

-         services = backends[conf.messaging]["services"]

-         suffix = backends[conf.messaging]["topic_suffix"]

+         services = default_messaging_backend["services"]

+         suffix = default_messaging_backend["topic_suffix"]

          self.topic = [

              "{}.{}{}".format(prefix.rstrip("."), category, suffix)

              for prefix, category in itertools.product(prefixes, services)
@@ -84,34 +111,6 @@ 

              msg = module_build_service.messaging._initial_messages.pop(0)

              self.incoming.put(msg)

  

-         # These are our main lookup tables for figuring out what to run in

-         # response to what messaging events.

-         self.NO_OP = NO_OP = lambda config, msg: True

-         self.on_build_change = {

-             koji.BUILD_STATES["BUILDING"]: NO_OP,

-             koji.BUILD_STATES[

-                 "COMPLETE"

-             ]: module_build_service.scheduler.handlers.components.complete,

-             koji.BUILD_STATES["FAILED"]: module_build_service.scheduler.handlers.components.failed,

-             koji.BUILD_STATES[

-                 "CANCELED"

-             ]: module_build_service.scheduler.handlers.components.canceled,

-             koji.BUILD_STATES["DELETED"]: NO_OP,

-         }

-         self.on_module_change = {

-             models.BUILD_STATES["init"]: module_build_service.scheduler.handlers.modules.init,

-             models.BUILD_STATES["wait"]: module_build_service.scheduler.handlers.modules.wait,

-             models.BUILD_STATES["build"]: NO_OP,

-             models.BUILD_STATES["failed"]: module_build_service.scheduler.handlers.modules.failed,

-             models.BUILD_STATES["done"]: module_build_service.scheduler.handlers.modules.done,

-             # XXX: DIRECT TRANSITION TO READY

-             models.BUILD_STATES["ready"]: NO_OP,

-             models.BUILD_STATES["garbage"]: NO_OP,

-         }

-         # Only one kind of repo change event, though...

-         self.on_repo_change = module_build_service.scheduler.handlers.repos.done

-         self.on_tag_change = module_build_service.scheduler.handlers.tags.tagged

-         self.on_decision_update = module_build_service.scheduler.handlers.greenwave.decision_update

          self.sanity_check()

  

      def shutdown(self):
@@ -124,8 +123,8 @@ 

      def validate(self, message):

          if conf.messaging == "fedmsg":

              # If this is a faked internal message, don't bother.

-             if isinstance(message, module_build_service.messaging.BaseMessage):

-                 log.info("Skipping crypto validation for %r" % message)

+             if "event" in message:

+                 log.info("Skipping crypto validation for %r", message)

                  return

              # Otherwise, if it is a real message from the network, pass it

              # through crypto validation.
@@ -139,14 +138,18 @@ 

          # messages, then just use them as-is.  If they are not already

          # instances of our message abstraction base class, then first transform

          # them before proceeding.

-         if isinstance(message, module_build_service.messaging.BaseMessage):

-             msg = message

+         if "event" in message:

+             event_info = message

          else:

-             msg = self.get_abstracted_msg(message)

+             try:

+                 event_info = self.get_abstracted_event_info(message)

+             except IgnoreMessage as e:

+                 log.warning(str(e))

+                 return

  

          # Primary work is done here.

          try:

-             self.process_message(msg)

+             self.process_message(event_info)

              monitor.messaging_rx_processed_ok_counter.inc()

          except sqlalchemy.exc.OperationalError as error:

              monitor.messaging_rx_failed_counter.inc()
@@ -164,12 +167,13 @@ 

          if self.stop_condition and self.stop_condition(message):

              self.shutdown()

  

-     def get_abstracted_msg(self, message):

-         parser = module_build_service.messaging._messaging_backends[conf.messaging].get("parser")

+     @staticmethod

+     def get_abstracted_event_info(message):

+         parser = default_messaging_backend.get("parser")

          if parser:

              try:

                  return parser.parse(message)

-             except module_build_service.messaging.IgnoreMessage:

+             except IgnoreMessage:

                  pass

          else:

              raise ValueError("{0} backend does not define a message parser".format(conf.messaging))
@@ -178,83 +182,91 @@ 

          """ On startup, make sure our implementation is sane. """

          # Ensure we have every state covered

          for state in models.BUILD_STATES:

-             if models.BUILD_STATES[state] not in self.on_module_change:

+             if models.BUILD_STATES[state] not in ON_MODULE_CHANGE_HANDLERS:

                  raise KeyError("Module build states %r not handled." % state)

          for state in koji.BUILD_STATES:

-             if koji.BUILD_STATES[state] not in self.on_build_change:

+             if koji.BUILD_STATES[state] not in ON_BUILD_CHANGE_HANDLERS:

                  raise KeyError("Koji build states %r not handled." % state)

  

-         all_fns = list(self.on_build_change.items()) + list(self.on_module_change.items())

-         for key, callback in all_fns:

-             expected = ["config", "msg"]

-             if six.PY2:

-                 argspec = inspect.getargspec(callback)[0]

-             else:

-                 argspec = inspect.getfullargspec(callback)[0]

-             if argspec != expected:

-                 raise ValueError(

-                     "Callback %r, state %r has argspec %r!=%r" % (callback, key, argspec, expected))

- 

-     def _map_message(self, db_session, msg):

+     def _map_message(self, db_session, event_info):

          """Map message to its corresponding event handler and module build"""

  

-         if isinstance(msg, module_build_service.messaging.KojiBuildChange):

-             handler = self.on_build_change[msg.build_new_state]

-             build = models.ComponentBuild.from_component_event(db_session, msg)

+         event = event_info["event"]

+ 

+         if event == events.KOJI_BUILD_CHANGE:

+             handler = ON_BUILD_CHANGE_HANDLERS[event_info["build_new_state"]]

+             build = models.ComponentBuild.from_component_event(

+                 db_session, event_info["task_id"], event_info["module_build_id"])

              if build:

                  build = build.module_build

              return handler, build

  

-         if isinstance(msg, module_build_service.messaging.KojiRepoChange):

+         if event == events.KOJI_REPO_CHANGE:

              return (

-                 self.on_repo_change,

-                 models.ModuleBuild.from_repo_done_event(db_session, msg)

+                 ON_REPO_CHANGE_HANDLER,

+                 models.ModuleBuild.get_by_tag(db_session, event_info["repo_tag"])

              )

  

-         if isinstance(msg, module_build_service.messaging.KojiTagChange):

+         if event == events.KOJI_TAG_CHANGE:

              return (

-                 self.on_tag_change,

-                 models.ModuleBuild.from_tag_change_event(db_session, msg)

+                 ON_TAG_CHANGE_HANDLER,

+                 models.ModuleBuild.get_by_tag(db_session, event_info["tag_name"])

              )

  

-         if isinstance(msg, module_build_service.messaging.MBSModule):

+         if event == events.MBS_MODULE_STATE_CHANGE:

+             state = event_info["module_build_state"]

+             valid_module_build_states = list(models.BUILD_STATES.values())

+             if state not in valid_module_build_states:

+                 raise ValueError("state={}({}) is not in {}.".format(

+                     state, type(state), valid_module_build_states

+                 ))

              return (

-                 self.on_module_change[module_build_state_from_msg(msg)],

-                 models.ModuleBuild.from_module_event(db_session, msg)

+                 ON_MODULE_CHANGE_HANDLERS[state],

+                 models.ModuleBuild.get_by_id(db_session, event_info["module_build_id"])

              )

  

-         if isinstance(msg, module_build_service.messaging.GreenwaveDecisionUpdate):

+         if event == events.GREENWAVE_DECISION_UPDATE:

              return (

-                 self.on_decision_update,

-                 greenwave.get_corresponding_module_build(msg.subject_identifier)

+                 ON_DECISION_UPDATE_HANDLER,

+                 greenwave.get_corresponding_module_build(event_info["subject_identifier"])

              )

  

          return None, None

  

-     def process_message(self, msg):

+     def process_message(self, event_info):

          # Choose a handler for this message

-         handler, build = self._map_message(db_session, msg)

+         handler, build = self._map_message(db_session, event_info)

  

          if handler is None:

-             log.debug("No event handler associated with msg %s", msg.msg_id)

+             log.debug("No event handler associated with msg %s", event_info["msg_id"])

              return

  

-         idx = "%s: %s, %s" % (handler.__name__, type(msg).__name__, msg.msg_id)

+         idx = "%s: %s, %s" % (

+             handler.__name__, event_info["event"], event_info["msg_id"])

  

-         if handler is self.NO_OP:

+         if handler is no_op_handler:

              log.debug("Handler is NO_OP: %s", idx)

              return

  

          if not build:

-             log.debug("No module associated with msg %s", msg.msg_id)

+             log.debug("No module associated with msg %s", event_info["msg_id"])

              return

  

          MBSConsumer.current_module_build_id = build.id

  

          log.info("Calling %s", idx)

  

+         kwargs = event_info.copy()

+         kwargs.pop("event")

+ 

          try:

-             further_work = handler(conf, msg) or []

+             if conf.celery_broker_url:

+                 # handlers are also Celery tasks, when celery_broker_url is configured,

+                 # call "delay" method to run the handlers as Celery async tasks

+                 func = getattr(handler, "delay")

+                 func(**kwargs)

+             else:

+                 handler(**kwargs)

          except Exception as e:

              log.exception("Could not process message handler.")

              db_session.rollback()
@@ -270,16 +282,6 @@ 

  

              # Allow caller to do something when error is occurred.

              raise

-         else:

-             # Handlers can *optionally* return a list of fake messages that

-             # should be re-inserted back into the main work queue. We can use

-             # this (for instance) when we submit a new component build but (for

-             # some reason) it has already been built, then it can fake its own

-             # completion back to the scheduler so that work resumes as if it

-             # was submitted for real and koji announced its completion.

-             for event in further_work:

-                 log.info("  Scheduling faked event %r", event)

-                 self.incoming.put(event)

          finally:

              MBSConsumer.current_module_build_id = None

              log.debug("Done with %s", idx)
@@ -302,9 +304,3 @@ 

      """ Artificially put a message into the work queue of the consumer. """

      consumer = get_global_consumer()

      consumer.incoming.put(msg)

- 

- 

- def fake_repo_done_message(tag_name):

-     msg = module_build_service.messaging.KojiRepoChange(

-         msg_id="a faked internal message", repo_tag=tag_name + "-build")

-     work_queue_put(msg)

@@ -22,7 +22,7 @@ 

      get_compatible_base_module_mmds, expand_single_mse_streams)

  

  

- def add_default_modules(mmd, arches):

+ def add_default_modules(mmd):

      """

      Add default modules as buildrequires to the input modulemd.

  
@@ -31,8 +31,6 @@ 

      database will be logged and ignored.

  

      :param Modulemd.ModuleStream mmd: the modulemd of the module to add the module defaults to

-     :param list arches: the arches to limit the external repo queries to; this should be the arches

-         the module will be built with

      :raises RuntimeError: if the buildrequired base module isn't in the database or the default

          modules list can't be downloaded

      """

@@ -0,0 +1,87 @@ 

+ # -*- coding: utf-8 -*-

+ # SPDX-License-Identifier: MIT

+ 

+ """

+ This module defines constants for the events emitted by the external services

+ that work together with MBS to complete a module build.

+ 

+ Event names are defined as generically as possible, especially for the events

+ from Koji, because Koji-based instances such as Brew may send messages to

+ different topics on different message buses. For example, when a build is

+ complete, Koji sends a message to the topic buildsys.build.state.change,

+ whereas Brew sends one to brew.build.complete.

+ """

+ 

+ import time

+ import sched

+ from functools import wraps

+ 

+ from module_build_service import log

+ 

+ 

+ KOJI_BUILD_CHANGE = "koji_build_change"

+ KOJI_TAG_CHANGE = "koji_tag_change"

+ KOJI_REPO_CHANGE = "koji_repo_change"

+ MBS_MODULE_STATE_CHANGE = "mbs_module_state_change"

+ GREENWAVE_DECISION_UPDATE = "greenwave_decision_update"
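
These generic names decouple the handlers from concrete bus topics; a hypothetical illustration of the mapping (the real Koji one is implemented by the parser added later in this PR):

    # "buildsys.build.state.change" (Koji) -> KOJI_BUILD_CHANGE
    # "brew.build.complete" (Brew)         -> KOJI_BUILD_CHANGE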

+ 

+ 

+ class Scheduler(sched.scheduler):

+     """

+     Subclass of `sched.scheduler` that allows scheduling calls to handlers.

+ 

+     If an MBS handler function needs to call another handler, it must do so in a

+     safe way: the nested handler call must not happen in the middle of another

+     handler's execution.

+ 

+     This class provides a solution for that. A handler can schedule a run of

+     another handler using the `add` method. Handlers should use the

+     `mbs_event_handler` decorator, which ensures that the `run` method is called

+     at the end of the handler's execution, so the scheduled handlers are then

+     executed.

+     """

+ 

+     def add(self, handler, arguments=()):

+         """

+         Schedule execution of `handler` with `arguments`.

+         """

+         self.enter(0, 0, handler.delay, arguments)

+ 

+     def run(self):

+         """

+         Runs scheduled handlers.

+         """

+         log.debug("Running event scheduler with following events:")

+         for event in self.queue:

+             log.debug("    %r", event)

+         sched.scheduler.run(self)

+ 

+     def reset(self):

+         """

+         Resets the Scheduler to initial state.

+         """

+         while not self.empty():

+             self.cancel(self.queue[0])

+ 

+ 

+ scheduler = Scheduler(time.time, delayfunc=lambda x: x)

+ 

+ 

+ def mbs_event_handler():

+     """

+     A decorator for MBS event handlers. It implements common tasks that would

+     otherwise be repeated in every MBS event handler, for example:

+ 

+       - at the end of the handler, call events.scheduler.run().

+     """

+ 

+     def decorator(func):

+         @wraps(func)

+         def wrapper(*args, **kwargs):

+             try:

+                 return func(*args, **kwargs)

+             finally:

+                 scheduler.run()

+ 

+         return wrapper

+ 

+     return decorator
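
A usage sketch, with hypothetical handlers decorated the same way as the real ones in this PR (both must be Celery tasks so that `delay` exists):

    @celery_app.task
    @mbs_event_handler()
    def some_handler(msg_id):
        # Defers other_handler until some_handler finishes; the decorator's
        # finally-block calls scheduler.run(), which invokes other_handler.delay(...)
        scheduler.add(other_handler, ("fake_msg",))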

@@ -4,41 +4,69 @@ 

  

  import logging

  import koji

- import module_build_service.builder

  

+ from module_build_service import celery_app, conf, models, log

+ from module_build_service.builder import GenericBuilder

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  from module_build_service.utils.general import mmd_to_str

- from module_build_service import models, log, messaging

  from module_build_service.db_session import db_session

+ from module_build_service.scheduler import events

+ from module_build_service.utils.batches import continue_batch_build

  

  logging.basicConfig(level=logging.DEBUG)

  

  

- def _finalize(config, msg, state):

-     """ Called whenever a koji build completes or fails. """

+ @celery_app.task

+ @events.mbs_event_handler()

+ def build_task_finalize(

+         msg_id, build_id, task_id, build_new_state,

+         build_name, build_version, build_release,

+         module_build_id=None, state_reason=None

+ ):

+     """Called when corresponding Koji build task of a component build finishes

+ 

+     When a task finishes, the task could be in state COMPLETE, FAILED or CANCELED.

+ 

+     :param str msg_id: the original id of the message being handled which is

+         received from the message bus.

+     :param int build_id: the Koji build id.

+     :param int task_id: the Koji build task id.

+     :param int build_new_state: the state of the build. Refer to

+         ``koji.BUILD_STATES`` for details. For this handler, values could be

+         the corresponding integer value of COMPLETE, FAILED or CANCELED.

+     :param str build_name: the build name.

+     :param str build_version: the build version.

+     :param str build_release: the build release.

+     :param int module_build_id: optionally set when this event handler is

+         scheduled from internal rather than just handling the received message.

+         When set, the value should be the id of module build having the

+         component build just built by the finished task.

+     :param str state_reason: optional. When set a reason explicitly, the

+         corresponding component build will have this reason as the

+         ``state_reason``. Otherwise, a custom reason will be set for a failed

+         build.

+     """

  

      # First, find our ModuleBuild associated with this component, if any.

-     component_build = models.ComponentBuild.from_component_event(db_session, msg)

-     try:

-         nvr = "{}-{}-{}".format(msg.build_name, msg.build_version, msg.build_release)

-     except KeyError:

-         nvr = None

+     component_build = models.ComponentBuild.from_component_event(

+         db_session, task_id, module_id=module_build_id)

+     nvr = "{}-{}-{}".format(build_name, build_version, build_release)

  

      if not component_build:

-         log.debug("We have no record of %s" % nvr)

+         log.debug("We have no record of %s", nvr)

          return

  

-     log.info("Saw relevant component build of %r from %r." % (nvr, msg.msg_id))

+     log.info("Saw relevant component build of %r from %r.", nvr, msg_id)

  

-     if msg.state_reason:

-         state_reason = msg.state_reason

-     elif state != koji.BUILD_STATES["COMPLETE"]:

-         state_reason = "Failed to build artifact %s in Koji" % (msg.build_name)

+     if state_reason:

+         pass  # an explicit reason passed by the caller takes precedence

+     elif build_new_state != koji.BUILD_STATES["COMPLETE"]:

+         state_reason = "Failed to build artifact {} in Koji".format(build_name)

      else:

          state_reason = ""

  

      # Mark the state in the db.

-     component_build.state = state

+     component_build.state = build_new_state

      component_build.nvr = nvr

      component_build.state_reason = state_reason

      db_session.commit()
@@ -46,10 +74,11 @@ 

      parent = component_build.module_build

  

      # If the macro build failed, then the module is doomed.

-     if component_build.package == "module-build-macros" and state != koji.BUILD_STATES["COMPLETE"]:

+     if (component_build.package == "module-build-macros"

+             and build_new_state != koji.BUILD_STATES["COMPLETE"]):

          parent.transition(

              db_session,

-             config,

+             conf,

              state=models.BUILD_STATES["failed"],

              state_reason=state_reason,

              failure_type="user",
@@ -59,10 +88,10 @@ 

  

      if (

          component_build.buildonly

-         and config.system in ["koji", "test"]

-         and state == koji.BUILD_STATES["COMPLETE"]

+         and conf.system in ["koji", "test"]

+         and build_new_state == koji.BUILD_STATES["COMPLETE"]

      ):

-         koji_session = KojiModuleBuilder.get_session(config)

+         koji_session = KojiModuleBuilder.get_session(conf)

          rpms = koji_session.listBuildRPMs(component_build.nvr)

          mmd = parent.mmd()

          for artifact in rpms:
@@ -70,8 +99,6 @@ 

          parent.modulemd = mmd_to_str(mmd)

          db_session.commit()

  

-     further_work = []

- 

      parent_current_batch = parent.current_batch()

  

      # If there are no other components still building in a batch,
@@ -84,9 +111,7 @@ 

          failed_components_in_batch = [c for c in parent_current_batch if c.is_unsuccessful]

          built_components_in_batch = [c for c in parent_current_batch if c.is_completed]

  

-         builder = module_build_service.builder.GenericBuilder.create_from_module(

-             db_session, parent, config

-         )

+         builder = GenericBuilder.create_from_module(db_session, parent, conf)

  

          if failed_components_in_batch:

              log.info(
@@ -97,7 +122,7 @@ 

                  ", ".join(c.package for c in failed_components_in_batch))

              parent.transition(

                  db_session,

-                 config,

+                 conf,

                  state=models.BUILD_STATES["failed"],

                  state_reason=state_reason,

                  failure_type="user",
@@ -109,10 +134,9 @@ 

              # The repository won't be regenerated in this case and therefore we generate fake repo

              # change message here.

              log.info("Batch done. No component to tag")

-             further_work += [

-                 messaging.KojiRepoChange(

-                     "components::_finalize: fake msg", builder.module_build_tag["name"])

-             ]

+             from module_build_service.scheduler.handlers.repos import done as repos_done_handler

+             events.scheduler.add(

+                 repos_done_handler, ("fake_msg", builder.module_build_tag["name"]))

          else:

              built_component_nvrs_in_batch = [c.nvr for c in built_components_in_batch]

              # tag && add to srpm-build group if neccessary
@@ -147,21 +171,5 @@ 

          # done in repos.py:done(...), but because we have just finished one

          # build, try to call continue_batch_build again so in case we hit the

          # threshold previously, we will submit another build from this batch.

-         builder = module_build_service.builder.GenericBuilder.create_from_module(

-             db_session, parent, config)

-         further_work += module_build_service.utils.continue_batch_build(

-             config, parent, builder)

- 

-     return further_work

- 

- 

- def complete(config, msg):

-     return _finalize(config, msg, state=koji.BUILD_STATES["COMPLETE"])

- 

- 

- def failed(config, msg):

-     return _finalize(config, msg, state=koji.BUILD_STATES["FAILED"])

- 

- 

- def canceled(config, msg):

-     return _finalize(config, msg, state=koji.BUILD_STATES["CANCELED"])

+         builder = GenericBuilder.create_from_module(db_session, parent, conf)

+         continue_batch_build(conf, parent, builder)

@@ -1,9 +1,10 @@ 

  # -*- coding: utf-8 -*-

  # SPDX-License-Identifier: MIT

- from module_build_service import conf, log

+ from module_build_service import celery_app, conf, log

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  from module_build_service.db_session import db_session

  from module_build_service.models import ModuleBuild, BUILD_STATES

+ from module_build_service.scheduler import events

  

  

  def get_corresponding_module_build(nvr):
@@ -31,36 +32,40 @@ 

      return ModuleBuild.get_by_id(db_session, module_build_id)

  

  

- def decision_update(config, msg):

+ @celery_app.task

+ @events.mbs_event_handler()

+ def decision_update(msg_id, decision_context, subject_identifier, policies_satisfied):

      """Move module build to ready or failed according to Greenwave result

  

-     :param config: the config object returned from function :func:`init_config`,

-         which is loaded from configuration file.

-     :type config: :class:`Config`

-     :param msg: the message object representing a message received from topic

-         ``greenwave.decision.update``.

-     :type msg: :class:`GreenwaveDecisionUpdate`

+     :param str msg_id: the original id of the message being handled, which is

+         received from the message bus.

+     :param str decision_context: the context of the Greenwave decision. Refer to

+         the messaging document for detailed information.

+     :param str subject_identifier: usually a build NVR. Refer to

+         https://docs.pagure.org/greenwave/messaging.html for detailed information.

+     :param bool policies_satisfied: whether the build satisfies Greenwave rules.

+         Refer to the messaging document for detailed information.

      """

-     if not config.greenwave_decision_context:

+     if not conf.greenwave_decision_context:

          log.debug(

              "Skip Greenwave message %s as MBS does not have GREENWAVE_DECISION_CONTEXT "

              "configured",

-             msg.msg_id,

+             msg_id,

          )

          return

  

-     if msg.decision_context != config.greenwave_decision_context:

+     if decision_context != conf.greenwave_decision_context:

          log.debug(

              "Skip Greenwave message %s as MBS only handles messages with the "

              'decision context "%s"',

-             msg.msg_id,

-             config.greenwave_decision_context,

+             msg_id,

+             conf.greenwave_decision_context,

          )

          return

  

-     module_build_nvr = msg.subject_identifier

+     module_build_nvr = subject_identifier

  

-     if not msg.policies_satisfied:

+     if not policies_satisfied:

          log.debug(

              "Skip to handle module build %s because it has not satisfied Greenwave policies.",

              module_build_nvr,
@@ -87,8 +92,7 @@ 

          log.warning(

              "Module build %s is not in done state but Greenwave tells "

              "it passes tests in decision context %s",

-             module_build_nvr,

-             msg.decision_context,

+             module_build_nvr, decision_context,

          )

  

      db_session.commit()

@@ -2,11 +2,9 @@ 

  # SPDX-License-Identifier: MIT

  """ Handlers for module change events on the message bus. """

  

- from module_build_service import conf, models, log, build_logs

- import module_build_service.builder

+ from module_build_service import celery_app, conf, models, log, build_logs

  import module_build_service.resolver

  import module_build_service.utils

- import module_build_service.messaging

  from module_build_service.utils import (

      attempt_to_reuse_all_components,

      record_component_builds,
@@ -16,11 +14,13 @@ 

      record_module_build_arches

  )

  from module_build_service.db_session import db_session

+ from module_build_service.builder import GenericBuilder

  from module_build_service.errors import UnprocessableEntity, Forbidden, ValidationError

  from module_build_service.utils.greenwave import greenwave

  from module_build_service.scheduler.default_modules import (

      add_default_modules, handle_collisions_with_base_module_rpms)

  from module_build_service.utils.submit import format_mmd

+ from module_build_service.scheduler import events

  from module_build_service.utils.ursine import handle_stream_collision_modules

  

  from requests.exceptions import ConnectionError
@@ -40,27 +40,31 @@ 

      return os.path.basename(srpm_path).replace(".src.rpm", "")

  

  

- def failed(config, msg):

-     """

-     Called whenever a module enters the 'failed' state.

+ @celery_app.task

+ @events.mbs_event_handler()

+ def failed(msg_id, module_build_id, module_build_state):

+     """Called whenever a module enters the 'failed' state.

  

      We cancel all the remaining component builds of a module

      and stop the building.

-     """

  

-     build = models.ModuleBuild.from_module_event(db_session, msg)

+     :param str msg_id: the original id of the message being handled, which is

+         received from the message bus.

+     :param int module_build_id: the module build id.

+     :param int module_build_state: the module build state.

+     """

+     build = models.ModuleBuild.get_by_id(db_session, module_build_id)

  

-     if build.state != msg.module_build_state:

+     if build.state != module_build_state:

          log.warning(

              "Note that retrieved module state %r doesn't match message module state %r",

-             build.state, msg.module_build_state,

+             build.state, module_build_state,

          )

          # This is ok.. it's a race condition we can ignore.

          pass

  

      if build.koji_tag:

-         builder = module_build_service.builder.GenericBuilder.create_from_module(

-             db_session, build, config)

+         builder = GenericBuilder.create_from_module(db_session, build, conf)

  

          if build.new_repo_task_id:

              builder.cancel_build(build.new_repo_task_id)
@@ -80,7 +84,7 @@ 

              reason = "Missing koji tag. Assuming previously failed module lookup."

              log.error(reason)

              build.transition(

-                 db_session, config,

+                 db_session, conf,

                  state=models.BUILD_STATES["failed"],

                  state_reason=reason, failure_type="infra")

              db_session.commit()
@@ -89,27 +93,34 @@ 

      # Don't transition it again if it's already been transitioned

      if build.state != models.BUILD_STATES["failed"]:

          build.transition(

-             db_session, config, state=models.BUILD_STATES["failed"], failure_type="user")

+             db_session, conf, state=models.BUILD_STATES["failed"], failure_type="user")

  

      db_session.commit()

  

      build_logs.stop(build)

-     module_build_service.builder.GenericBuilder.clear_cache(build)

+     GenericBuilder.clear_cache(build)

  

  

- def done(config, msg):

+ @celery_app.task

+ @events.mbs_event_handler()

+ def done(msg_id, module_build_id, module_build_state):

      """Called whenever a module enters the 'done' state.

  

      We currently don't do anything useful, so moving to ready.

      Except for scratch module builds, which remain in the done state.

      Otherwise the done -> ready state should happen when all

      dependent modules were re-built, at least that's the current plan.

+ 

+     :param str msg_id: the original id of the message being handled, which is

+         received from the message bus.

+     :param int module_build_id: the module build id.

+     :param int module_build_state: the module build state.

      """

-     build = models.ModuleBuild.from_module_event(db_session, msg)

-     if build.state != msg.module_build_state:

+     build = models.ModuleBuild.get_by_id(db_session, module_build_id)

+     if build.state != module_build_state:

          log.warning(

              "Note that retrieved module state %r doesn't match message module state %r",

-             build.state, msg.module_build_state,

+             build.state, module_build_state,

          )

          # This is ok.. it's a race condition we can ignore.

          pass
@@ -117,7 +128,7 @@ 

      # Scratch builds stay in 'done' state

      if not build.scratch:

          if greenwave is None or greenwave.check_gating(build):

-             build.transition(db_session, config, state=models.BUILD_STATES["ready"])

+             build.transition(db_session, conf, state=models.BUILD_STATES["ready"])

          else:

              build.state_reason = "Gating failed"

              if greenwave.error_occurred:
@@ -126,15 +137,23 @@ 

          db_session.commit()

  

      build_logs.stop(build)

-     module_build_service.builder.GenericBuilder.clear_cache(build)

+     GenericBuilder.clear_cache(build)

  

  

- def init(config, msg):

-     """ Called whenever a module enters the 'init' state."""

+ @celery_app.task

+ @events.mbs_event_handler()

+ def init(msg_id, module_build_id, module_build_state):

+     """Called whenever a module enters the 'init' state.

+ 

+     :param str msg_id: the original id of the message being handled, which is

+         received from message bus.

+     :param int module_build_id: the module build id.

+     :param int module_build_state: the module build state.

+     """

      # Sleep for a few seconds to make sure the module in the database is committed

      # TODO: Remove this once messaging is implemented in SQLAlchemy hooks

      for i in range(3):

-         build = models.ModuleBuild.from_module_event(db_session, msg)

+         build = models.ModuleBuild.get_by_id(db_session, module_build_id)

          if build:

              break

          time.sleep(1)
@@ -157,7 +176,7 @@ 

          mmd = build.mmd()

          record_module_build_arches(mmd, build)

          arches = [arch.name for arch in build.arches]

-         defaults_added = add_default_modules(mmd, arches)

+         defaults_added = add_default_modules(mmd)

  

          # Format the modulemd by putting in defaults and replacing streams that

          # are branches with commit hashes
@@ -297,7 +316,9 @@ 

          return conf.koji_cg_default_build_tag

  

  

- def wait(config, msg):

+ @celery_app.task

+ @events.mbs_event_handler()

+ def wait(msg_id, module_build_id, module_build_state):

      """ Called whenever a module enters the 'wait' state.

  

      We transition to this state shortly after a modulebuild is first requested.
@@ -305,6 +326,11 @@ 

      All we do here is request preparation of the buildroot.

      The kicking off of individual component builds is handled elsewhere,

      in module_build_service.schedulers.handlers.repos.

+ 

+     :param str msg_id: the original id of the message being handled, which is

+         received from the message bus.

+     :param int module_build_id: the module build id.

+     :param int module_build_state: the module build state.

      """

  

      # Wait for the db on the frontend to catch up to the message, otherwise the
@@ -312,7 +338,7 @@ 

      # See https://pagure.io/fm-orchestrator/issue/386

      @module_build_service.utils.retry(interval=10, timeout=120, wait_on=RuntimeError)

      def _get_build_containing_xmd_for_mbs():

-         build = models.ModuleBuild.from_module_event(db_session, msg)

+         build = models.ModuleBuild.get_by_id(db_session, module_build_id)

          if "mbs" in build.mmd().get_xmd():

              return build

          db_session.expire(build)
@@ -323,10 +349,10 @@ 

      log.info("Found build=%r from message" % build)

      log.debug("%r", build.modulemd)

  

-     if build.state != msg.module_build_state:

+     if build.state != module_build_state:

          log.warning(

              "Note that retrieved module state %r doesn't match message module state %r",

-             build.state, msg.module_build_state,

+             build.state, module_build_state,

          )

          # This is ok.. it's a race condition we can ignore.

          pass
@@ -337,7 +363,7 @@ 

          reason = "Failed to get module info from MBS. Max retries reached."

          log.exception(reason)

          build.transition(

-             db_session, config,

+             db_session, conf,

              state=models.BUILD_STATES["failed"],

              state_reason=reason, failure_type="infra")

          db_session.commit()
@@ -365,8 +391,7 @@ 

              "It is disabled to tag module build during importing into Koji by Content Generator.")

          log.debug("Skip to assign Content Generator build koji tag to module build.")

  

-     builder = module_build_service.builder.GenericBuilder.create_from_module(

-         db_session, build, config)

+     builder = GenericBuilder.create_from_module(db_session, build, conf)

  

      log.debug(

          "Adding dependencies %s into buildroot for module %s:%s:%s",
@@ -376,21 +401,20 @@ 

  

      if not build.component_builds:

          log.info("There are no components in module %r, skipping build" % build)

-         build.transition(db_session, config, state=models.BUILD_STATES["build"])

+         build.transition(db_session, conf, state=models.BUILD_STATES["build"])

          db_session.add(build)

          db_session.commit()

          # Return a KojiRepoChange message so that the build can be transitioned to done

          # in the repos handler

-         return [

-             module_build_service.messaging.KojiRepoChange(

-                 "handlers.modules.wait: fake msg", builder.module_build_tag["name"])

-         ]

+         from module_build_service.scheduler.handlers.repos import done as repos_done_handler

+         events.scheduler.add(repos_done_handler, ("fake_msg", builder.module_build_tag["name"]))

+         return

  

      # If all components in module build will be reused, we don't have to build

      # module-build-macros, because there won't be any build done.

      if attempt_to_reuse_all_components(builder, build):

          log.info("All components have been reused for module %r, skipping build" % build)

-         build.transition(db_session, config, state=models.BUILD_STATES["build"])

+         build.transition(db_session, conf, state=models.BUILD_STATES["build"])

          db_session.add(build)

          db_session.commit()

          return []
@@ -402,7 +426,6 @@ 

      artifact_name = "module-build-macros"

  

      component_build = models.ComponentBuild.from_component_name(db_session, artifact_name, build.id)

-     further_work = []

      srpm = builder.get_disttag_srpm(

          disttag=".%s" % get_rpm_release(db_session, build),

          module_build=build)
@@ -419,10 +442,9 @@ 

          # Commit and refresh so that the SQLAlchemy relationships are available

          db_session.commit()

          db_session.refresh(component_build)

-         msgs = builder.recover_orphaned_artifact(component_build)

-         if msgs:

+         recovered = builder.recover_orphaned_artifact(component_build)

+         if recovered:

              log.info("Found an existing module-build-macros build")

-             further_work += msgs

          # There was no existing artifact found, so lets submit the build instead

          else:

              task_id, state, reason, nvr = builder.build(artifact_name=artifact_name, source=srpm)
@@ -434,10 +456,9 @@ 

          # It's possible that the build succeeded in the builder but some other step failed which

          # caused module-build-macros to be marked as failed in MBS, so check to see if it exists

          # first

-         msgs = builder.recover_orphaned_artifact(component_build)

-         if msgs:

+         recovered = builder.recover_orphaned_artifact(component_build)

+         if recovered:

              log.info("Found an existing module-build-macros build")

-             further_work += msgs

          else:

              task_id, state, reason, nvr = builder.build(artifact_name=artifact_name, source=srpm)

              component_build.task_id = task_id
@@ -446,19 +467,16 @@ 

              component_build.nvr = nvr

  

      db_session.add(component_build)

-     build.transition(db_session, config, state=models.BUILD_STATES["build"])

+     build.transition(db_session, conf, state=models.BUILD_STATES["build"])

      db_session.add(build)

      db_session.commit()

  

      # We always have to regenerate the repository.

-     if config.system == "koji":

+     if conf.system == "koji":

          log.info("Regenerating the repository")

          task_id = builder.koji_session.newRepo(builder.module_build_tag["name"])

          build.new_repo_task_id = task_id

          db_session.commit()

      else:

-         further_work.append(

-             module_build_service.messaging.KojiRepoChange(

-                 "fake msg", builder.module_build_tag["name"])

-         )

-     return further_work

+         from module_build_service.scheduler.handlers.repos import done as repos_done_handler

+         events.scheduler.add(repos_done_handler, ("fake_msg", builder.module_build_tag["name"]))
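
Calling the repo handler inline here would nest one handler's execution inside another's; scheduling it defers the call instead. A sketch of the mechanics under mbs_event_handler (tag name illustrative):

    # inside wait(), this only enqueues the call:
    events.scheduler.add(repos_done_handler, ("fake_msg", "module-foo-build"))
    # after wait() returns, the decorator's finally-block runs
    # events.scheduler.run(), which invokes:
    #     repos_done_handler.delay("fake_msg", "module-foo-build")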

@@ -2,26 +2,33 @@ 

  # SPDX-License-Identifier: MIT

  """ Handlers for repo change events on the message bus. """

  

- import module_build_service.builder

  import logging

  from datetime import datetime

- from module_build_service import models, log

+ from module_build_service import celery_app, conf, models, log

+ from module_build_service.builder import GenericBuilder

  from module_build_service.utils import start_next_batch_build

  from module_build_service.db_session import db_session

+ from module_build_service.scheduler import events

  

  logging.basicConfig(level=logging.DEBUG)

  

  

- def done(config, msg):

-     """ Called whenever koji rebuilds a repo, any repo. """

+ @celery_app.task

+ @events.mbs_event_handler()

+ def done(msg_id, repo_tag):

+     """Called whenever koji rebuilds a repo, any repo.

+ 

+     :param str msg_id: the original id of the message being handled which is

+         received from the message bus.

+     :param str repo_tag: the tag name from which the repo is generated.

+     """

  

      # First, find our ModuleBuild associated with this repo, if any.

-     tag = msg.repo_tag

-     if config.system in ("koji", "test") and not tag.endswith("-build"):

-         log.debug("Tag %r does not end with '-build' suffix, ignoring" % tag)

+     if conf.system in ("koji", "test") and not repo_tag.endswith("-build"):

+         log.debug("Tag %r does not end with '-build' suffix, ignoring", repo_tag)

          return

-     tag = tag[:-6] if tag.endswith("-build") else tag

-     module_build = models.ModuleBuild.from_repo_done_event(db_session, msg)

+     tag = repo_tag[:-6] if repo_tag.endswith("-build") else repo_tag

+     module_build = models.ModuleBuild.get_by_tag(db_session, repo_tag)

      if not module_build:

          log.debug("No module build found associated with koji tag %r" % tag)

          return
@@ -41,7 +48,7 @@ 

  

      # Get the list of untagged components in current/previous batches which

      # have been built successfully

-     if config.system in ("koji", "test") and current_batch:

+     if conf.system in ("koji", "test") and current_batch:

          if any(c.is_completed and not c.is_tagged for c in module_build.up_to_current_batch()):

              log.info("Ignoring repo regen, because not all components are tagged.")

              return
@@ -70,20 +77,18 @@ 

          state_reason = "Component(s) {} failed to build.".format(

              ", ".join(c.package for c in current_batch if c.is_unsuccessful))

          module_build.transition(

-             db_session, config, models.BUILD_STATES["failed"], state_reason, failure_type="infra")

+             db_session, conf, models.BUILD_STATES["failed"], state_reason, failure_type="infra")

          db_session.commit()

          log.warning("Odd!  All components in batch failed for %r." % module_build)

          return

  

-     groups = module_build_service.builder.GenericBuilder.default_buildroot_groups(

-         db_session, module_build)

- 

-     builder = module_build_service.builder.GenericBuilder.create(

+     groups = GenericBuilder.default_buildroot_groups(db_session, module_build)

+     builder = GenericBuilder.create(

          db_session,

          module_build.owner,

          module_build,

-         config.system,

-         config,

+         conf.system,

+         conf,

          tag_name=tag,

          components=[c.package for c in module_build.component_builds],

      )
@@ -101,7 +106,6 @@ 

      has_unbuilt_components = any(c.is_unbuilt for c in module_build.component_builds)

      has_failed_components = any(c.is_unsuccessful for c in module_build.component_builds)

  

-     further_work = []

      if has_unbuilt_components and not has_failed_components:

          # Ok, for the subset of builds that did complete successfully, check to

          # see if they are in the buildroot before starting new batch.
@@ -112,8 +116,7 @@ 

  

          # Try to start next batch build, because there are still unbuilt

          # components in a module.

-         further_work += start_next_batch_build(config, module_build, builder)

- 

+         start_next_batch_build(conf, module_build, builder)

      else:

          if has_failed_components:

              state_reason = "Component(s) {} failed to build.".format(
@@ -123,7 +126,7 @@ 

              )

              module_build.transition(

                  db_session,

-                 config,

+                 conf,

                  state=models.BUILD_STATES["failed"],

                  state_reason=state_reason,

                  failure_type="user",
@@ -133,7 +136,5 @@ 

              module_build.time_completed = datetime.utcnow()

              builder.finalize(succeeded=True)

  

-             module_build.transition(db_session, config, state=models.BUILD_STATES["done"])

+             module_build.transition(db_session, conf, state=models.BUILD_STATES["done"])

          db_session.commit()

- 

-     return further_work

@@ -2,37 +2,47 @@ 

  # SPDX-License-Identifier: MIT

  """ Handlers for repo change events on the message bus. """

  

- import module_build_service.builder

  import logging

  import koji

- from module_build_service import models, log, messaging

+ from module_build_service import celery_app, conf, models, log

  from module_build_service.db_session import db_session

+ from module_build_service.builder import GenericBuilder

+ from module_build_service.scheduler import events

  

  logging.basicConfig(level=logging.DEBUG)

  

  

- def tagged(config, msg):

-     """ Called whenever koji tags a build to tag. """

-     if config.system not in ("koji", "test"):

+ @celery_app.task

+ @events.mbs_event_handler()

+ def tagged(msg_id, tag_name, build_name, build_nvr):

+     """Called whenever koji tags a build to tag.

+ 

+     :param str msg_id: the original id of the message being handled, which is

+         received from the message bus.

+     :param str tag_name: the tag name applied.

+     :param str build_name: name of the tagged build.

+     :param str build_nvr: nvr of the tagged build.

+     """

+     if conf.system not in ("koji", "test"):

          return []

  

      # Find our ModuleBuild associated with this tagged artifact.

-     tag = msg.tag

-     module_build = models.ModuleBuild.from_tag_change_event(db_session, msg)

+     module_build = models.ModuleBuild.get_by_tag(db_session, tag_name)

      if not module_build:

-         log.debug("No module build found associated with koji tag %r" % tag)

+         log.debug("No module build found associated with koji tag %r", tag_name)

          return

  

      # Find tagged component.

-     component = models.ComponentBuild.from_component_nvr(db_session, msg.nvr, module_build.id)

+     component = models.ComponentBuild.from_component_nvr(

+         db_session, build_nvr, module_build.id)

      if not component:

-         log.error("No component %s in module %r", msg.nvr, module_build)

+         log.error("No component %s in module %r", build_nvr, module_build)

          return

  

-     log.info("Saw relevant component tag of %r from %r." % (component.nvr, msg.msg_id))

+     log.info("Saw relevant component tag of %r from %r.", component.nvr, msg_id)

  

      # Mark the component as tagged

-     if tag.endswith("-build"):

+     if tag_name.endswith("-build"):

          component.tagged = True

      else:

          component.tagged_in_final = True
@@ -41,16 +51,14 @@ 

      if any(c.is_unbuilt for c in module_build.current_batch()):

          log.info(

              "Not regenerating repo for tag %s, there are still building components in a batch",

-             tag,

+             tag_name,

          )

          return []

  

-     further_work = []

- 

      # If all components are tagged, start newRepo task.

      if not any(c.is_completed and not c.is_tagged for c in module_build.up_to_current_batch()):

-         builder = module_build_service.builder.GenericBuilder.create_from_module(

-             db_session, module_build, config)

+         builder = GenericBuilder.create_from_module(

+             db_session, module_build, conf)

  

          if any(c.is_unbuilt for c in module_build.component_builds):

              if not _is_new_repo_generating(module_build, builder.koji_session):
@@ -69,14 +77,11 @@ 

              # would be useless to wait for a repository we will not use anyway.

              log.info(

                  "All components in module tagged and built, skipping the last repo regeneration")

-             further_work += [

-                 messaging.KojiRepoChange(

-                     "components::_finalize: fake msg", builder.module_build_tag["name"])

-             ]

+             from module_build_service.scheduler.handlers.repos import done as repos_done_handler

+             events.scheduler.add(

+                 repos_done_handler, ("fake_msg", builder.module_build_tag["name"]))

          db_session.commit()

  

-     return further_work

- 

  

  def _is_new_repo_generating(module_build, koji_session):

      """ Return whether or not a new repo is already being generated. """

@@ -0,0 +1,65 @@ 

+ # -*- coding: utf-8 -*-

+ # SPDX-License-Identifier: MIT

+ 

+ import logging

+ from module_build_service import models

+ 

+ from module_build_service.db_session import db_session

+ from module_build_service.scheduler.handlers.modules import init as modules_init_handler

+ from module_build_service.scheduler.handlers.modules import wait as modules_wait_handler

+ from module_build_service.scheduler.handlers.modules import done as modules_done_handler

+ from module_build_service.scheduler.handlers.modules import failed as modules_failed_handler

+ 

+ log = logging.getLogger(__name__)

+ 

+ 

+ """

+ This module contains functions to control fedmsg-hub running locally for local

+ module build and running tests within test_build.py particularly.

+ """

+ 

+ __all__ = ["main"]

+ 

+ 

+ def raise_for_failed_build(module_build_ids):

+     """

+     Raises an exception if any module build from `module_build_ids` list is in failed state.

+     This function also calls "failed" handler before raises an exception.

+ 

+     :param list module_build_ids: List of module build IDs (int) to build locally.

+     """

+     builds = db_session.query(models.ModuleBuild).filter(

+         models.ModuleBuild.id.in_(module_build_ids)).all()

+     has_failed_build = False

+     for build in builds:

+         if build.state == models.BUILD_STATES["failed"]:

+             modules_failed_handler("fake_msg_id", build.id, "failed")

+             has_failed_build = True

+     if has_failed_build:

+         raise ValueError("Local module build failed.")

+ 

+ 

+ def main(module_build_ids):

+     """

+     Build modules locally. The modules have to be stored in the local database before

+     calling this function.

+ 

+     :param list module_build_ids: List of module build IDs (int) to build locally.

+     """

+     # The transition between states is normally handled by ModuleBuild.transition, which sends

+     # a message to the message bus. The message is then received by the Consumer and the handler

+     # is called. But for local builds there is no message bus, so we simply call the handlers

+     # in the right order manually.

+     # We only need to ensure that we do not call further handlers once a module build has failed.

+     for module_build_id in module_build_ids:

+         modules_init_handler("fake_msg_id", module_build_id, "init")

+ 

+     raise_for_failed_build(module_build_ids)

+     for module_build_id in module_build_ids:

+         modules_wait_handler("fake_msg_id", module_build_id, "wait")

+ 

+     raise_for_failed_build(module_build_ids)

+     for module_build_id in module_build_ids:

+         modules_done_handler("fake_msg_id", module_build_id, "done")

+ 

+     raise_for_failed_build(module_build_ids)
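
A hypothetical invocation, assuming the builds were already stored in the local database and this new file lands as module_build_service.scheduler.local (ids illustrative):

    from module_build_service.scheduler.local import main

    main([1, 2])  # runs init -> wait -> done for builds 1 and 2, stopping early on failure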

@@ -0,0 +1,130 @@ 

+ # -*- coding: utf-8 -*-

+ # SPDX-License-Identifier: MIT

+ 

+ import re

+ 

+ from module_build_service import log

+ from module_build_service.errors import IgnoreMessage

+ from module_build_service.scheduler import events

+ 

+ 

+ class MessageParser(object):

+     """Base class for parsing messages received from a specific message bus

+ 

+     :param topic_categories: list of known services whose messages MBS can

+         handle. For example, a value could be

+         ``["buildsys", "mbs", "greenwave"]``.

+     :type topic_categories: list[str]

+     """

+ 

+     def __init__(self, topic_categories):

+         self.topic_categories = topic_categories

+ 

+     def parse(self, msg):

+         raise NotImplementedError()

+ 

+ 

+ class FedmsgMessageParser(MessageParser):

+ 

+     def parse(self, msg):

+         """

+         Parse a received message and convert it to a consistent format

+ 

+         :param dict msg: the message contents from the message bus.

+         :return: a mapping representing the corresponding event.

+             If the topic isn't recognized, None is returned.

+         :rtype: dict or None

+         """

+ 

+         if "body" in msg:

+             msg = msg["body"]

+         topic = msg["topic"]

+         categories_re = "|".join(map(re.escape, self.topic_categories))

+         regex_pattern = re.compile(

+             r"(?P<category>" + categories_re + r")"

+             r"(?:(?:\.)(?P<object>build|repo|module|decision))?"

+             r"(?:(?:\.)(?P<subobject>state|build))?"

+             r"(?:\.)(?P<event>change|done|end|tag|update)$"

+         )

+         regex_results = re.search(regex_pattern, topic)

+ 

+         if regex_results:

+             category = regex_results.group("category")

+             object = regex_results.group("object")

+             subobject = regex_results.group("subobject")

+             event = regex_results.group("event")

+ 

+             msg_id = msg.get("msg_id")

+             msg_inner_msg = msg.get("msg")

+ 

+             # If there isn't a msg dict in msg then this message can be skipped

+             if not msg_inner_msg:

+                 log.debug(

+                     "Skipping message without any content with the " 'topic "{0}"'.format(topic))

+                 return None

+ 

+             # Ignore all messages from the secondary koji instances.

+             if category == "buildsys":

+                 instance = msg_inner_msg.get("instance", "primary")

+                 if instance != "primary":

+                     log.debug("Ignoring message from %r koji hub." % instance)

+                     return

+ 

+                 if object == "build" and subobject == "state" and event == "change":

+                     task_id = msg_inner_msg.get("task_id")

+                     if task_id is None:

+                         raise IgnoreMessage(

+                             "Ignore message {}, with has a null task_id.".format(msg_id))

+                     return {

+                         "msg_id": msg_id,

+                         "event": events.KOJI_BUILD_CHANGE,

+                         "build_id": msg_inner_msg.get("build_id"),

+                         "task_id": task_id,

+                         "build_new_state": msg_inner_msg.get("new"),

+                         "build_name": msg_inner_msg.get("name"),

+                         "build_version": msg_inner_msg.get("version"),

+                         "build_release": msg_inner_msg.get("release"),

+                         "module_build_id": None,

+                         "state_reason": None,

+                     }

+ 

+                 if object == "repo" and subobject is None and event == "done":

+                     return {

+                         "msg_id": msg_id,

+                         "event": events.KOJI_REPO_CHANGE,

+                         "repo_tag": msg_inner_msg.get("tag")

+                     }

+ 

+                 if event == "tag":

+                     name = msg_inner_msg.get("name")

+                     version = msg_inner_msg.get("version")

+                     release = msg_inner_msg.get("release")

+                     nvr = None

+                     if name and version and release:

+                         nvr = "-".join((name, version, release))

+                     return {

+                         "msg_id": msg_id,

+                         "event": events.KOJI_TAG_CHANGE,

+                         "tag_name": msg_inner_msg.get("tag"),

+                         "build_name": msg_inner_msg.get("name"),

+                         "build_nvr": nvr,

+                     }

+ 

+             if (category == "mbs"

+                     and object == "module" and subobject == "state" and event == "change"):

+                 return {

+                     "msg_id": msg_id,

+                     "event": events.MBS_MODULE_STATE_CHANGE,

+                     "module_build_id": msg_inner_msg.get("id"),

+                     "module_build_state": msg_inner_msg.get("state"),

+                 }

+ 
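
+             # Greenwave gating decision updates.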

+             if (category == "greenwave"

+                     and object == "decision" and subobject is None and event == "update"):

+                 return {

+                     "msg_id": msg_id,

+                     "event": events.GREENWAVE_DECISION_UPDATE,

+                     "decision_context": msg_inner_msg.get("decision_context"),

+                     "policies_satisfied": msg_inner_msg.get("policies_satisfied"),

+                     "subject_identifier": msg_inner_msg.get("subject_identifier"),

+                 }

@@ -1,486 +1,470 @@ 

  # -*- coding: utf-8 -*-

  # SPDX-License-Identifier: MIT

- """ The PollingProducer class that acts as a producer entry point for

- fedmsg-hub. This class polls the database for tasks to do.

- """

  

  import koji

  import operator

  from datetime import timedelta, datetime

- from sqlalchemy.orm import lazyload

- from moksha.hub.api.producer import PollingProducer

+ from sqlalchemy.orm import lazyload, load_only

  

  import module_build_service.messaging

  import module_build_service.scheduler

  import module_build_service.scheduler.consumer

- from module_build_service import conf, models, log

+ from module_build_service import celery_app, conf, models, log

  from module_build_service.builder import GenericBuilder

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  from module_build_service.utils.greenwave import greenwave

  from module_build_service.db_session import db_session

+ from module_build_service.scheduler.consumer import ON_MODULE_CHANGE_HANDLERS

+ from module_build_service.scheduler.handlers.components import build_task_finalize

+ from module_build_service.scheduler.handlers.tags import tagged

+ 

+ 

+ @celery_app.on_after_configure.connect

+ def setup_periodic_tasks(sender, **kwargs):

+     tasks = (

+         (log_summary, "Log summary of module builds and component builds"),

+         (process_waiting_module_builds, "Process waiting module builds"),

+         (fail_lost_builds, "Fail lost builds"),

+         (process_paused_module_builds, "Process paused module builds"),

+         (retrigger_new_repo_on_failure, "Retrigger failed newRepo tasks"),

+         (delete_old_koji_targets, "Delete old koji targets"),

+         (cleanup_stale_failed_builds, "Cleanup stale failed builds"),

+         (cancel_stuck_module_builds, "Cancel stuck module builds"),

+         (sync_koji_build_tags, "Sync Koji build tags"),

+         (poll_greenwave, "Gate module builds to the ready state"),

+     )

+ 
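
+     # Schedule each task to run every conf.polling_interval seconds.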

+     for task, name in tasks:

+         sender.add_periodic_task(conf.polling_interval, task.s(), name=name)

+ 

+ 

+ @celery_app.task

+ def log_summary():

+     states = sorted(models.BUILD_STATES.items(), key=operator.itemgetter(1))

+     for name, code in states:

+         query = db_session.query(models.ModuleBuild).filter_by(state=code)

+         count = query.count()

+         if count:

+             log.info("  * %s module builds in the %s state", count, name)

+         if name == "build":

+             for module_build in query.all():

+                 log.info("    * %r", module_build)

+                 # First batch is number '1'.

+                 for i in range(1, module_build.batch + 1):

+                     n = len([c for c in module_build.component_builds if c.batch == i])

+                     log.info("      * %s components in batch %s", n, i)

+ 

+ 

+ @celery_app.task

+ def process_waiting_module_builds():

+     for state in ["init", "wait"]:

+         nudge_module_builds_in_state(state, 10)

+ 

+ 

+ def nudge_module_builds_in_state(state_name, older_than_minutes):

+     """

+     Finds all the module builds in the state `state_name` with `time_modified`

+     older than `older_than_minutes` and schedules a fake module state change

+     event to nudge each of them.

+     """

+     log.info("Looking for module builds stuck in the %s state", state_name)

+     builds = models.ModuleBuild.by_state(db_session, state_name)

+     log.info(" %r module builds in the %s state...", len(builds), state_name)

+     now = datetime.utcnow()

+     time_modified_threshold = timedelta(minutes=older_than_minutes)

+     for build in builds:

+ 

+         # Only give builds a nudge if stuck for longer than the threshold

+         if (now - build.time_modified) < time_modified_threshold:

+             continue

+ 

+         # Pretend the build is modified, so we don't tight spin.

+         build.time_modified = now

+         db_session.commit()

  

+         # Fake a message to kickstart the build anew in the consumer

+         state = module_build_service.models.BUILD_STATES[state_name]

+         handler = ON_MODULE_CHANGE_HANDLERS[state]

+         handler.delay("internal:mbs.module.state.change", build.id, state)

  

- class MBSProducer(PollingProducer):

-     frequency = timedelta(seconds=conf.polling_interval)

- 

-     def poll(self):

-         try:

-             self.log_summary()

-             self.process_waiting_module_builds()

-             self.process_open_component_builds()

-             self.fail_lost_builds()

-             self.process_paused_module_builds(conf)

-             self.retrigger_new_repo_on_failure(conf)

-             self.delete_old_koji_targets(conf)

-             self.cleanup_stale_failed_builds(conf)

-             self.sync_koji_build_tags(conf)

-             self.poll_greenwave(conf)

-         except Exception:

-             msg = "Error in poller execution:"

-             log.exception(msg)

- 

-         # Poller runs in its own thread. Database session can be removed safely.

-         db_session.remove()

- 

-         log.info('Poller will now sleep for "{}" seconds'.format(conf.polling_interval))

- 

-     def fail_lost_builds(self):

-         # This function is supposed to be handling only the part which can't be

-         # updated through messaging (e.g. srpm-build failures). Please keep it

-         # fit `n` slim. We do want rest to be processed elsewhere

-         # TODO re-use

- 

-         if conf.system == "koji":

-             # We don't do this on behalf of users

-             koji_session = KojiModuleBuilder.get_session(conf, login=False)

-             log.info("Querying tasks for statuses:")

-             res = (

-                 db_session.query(models.ComponentBuild)

-                 .filter_by(state=koji.BUILD_STATES["BUILDING"])

-                 .options(lazyload("module_build"))

-                 .all()

-             )

- 

-             log.info("Checking status for {0} tasks".format(len(res)))

-             for component_build in res:

-                 log.debug(component_build.json(db_session))

-                 # Don't check tasks which haven't been triggered yet

-                 if not component_build.task_id:

-                     continue

- 

-                 # Don't check tasks for components which have been reused,

-                 # they may have BUILDING state temporarily before we tag them

-                 # to new module tag. Checking them would be waste of resources.

-                 if component_build.reused_component_id:

-                     log.debug(

-                         'Skipping check for task "{0}", '

-                         'the component has been reused ("{1}").'.format(

-                             component_build.task_id, component_build.reused_component_id)

-                     )

-                     continue

- 

-                 task_id = component_build.task_id

- 

-                 log.info('Checking status of task_id "{0}"'.format(task_id))

-                 task_info = koji_session.getTaskInfo(task_id)

- 

-                 state_mapping = {

-                     # Cancelled and failed builds should be marked as failed.

-                     koji.TASK_STATES["CANCELED"]: koji.BUILD_STATES["FAILED"],

-                     koji.TASK_STATES["FAILED"]: koji.BUILD_STATES["FAILED"],

-                     # Completed tasks should be marked as complete.

-                     koji.TASK_STATES["CLOSED"]: koji.BUILD_STATES["COMPLETE"],

-                 }

- 

-                 # If it is a closed/completed task, then we can extract the NVR

-                 build_version, build_release = None, None  # defaults

-                 if task_info["state"] == koji.TASK_STATES["CLOSED"]:

-                     builds = koji_session.listBuilds(taskID=task_id)

-                     if not builds:

-                         log.warning(

-                             "Task ID %r is closed, but we found no builds in koji." % task_id)

-                     elif len(builds) > 1:

-                         log.warning(

-                             "Task ID %r is closed, but more than one build is present!" % task_id)

-                     else:

-                         build_version = builds[0]["version"]

-                         build_release = builds[0]["release"]

- 

-                 log.info("  task {0!r} is in state {1!r}".format(task_id, task_info["state"]))

-                 if task_info["state"] in state_mapping:

-                     # Fake a fedmsg message on our internal queue

-                     msg = module_build_service.messaging.KojiBuildChange(

-                         msg_id="producer::fail_lost_builds fake msg",

-                         build_id=component_build.task_id,

-                         task_id=component_build.task_id,

-                         build_name=component_build.package,

-                         build_new_state=state_mapping[task_info["state"]],

-                         build_release=build_release,

-                         build_version=build_version,

-                     )

-                     module_build_service.scheduler.consumer.work_queue_put(msg)

- 

-         elif conf.system == "mock":

-             pass

- 

-     def cleanup_stale_failed_builds(self, conf):

-         """ Does various clean up tasks on stale failed module builds

-         :param conf: the MBS configuration object

-         :param db_session: a SQLAlchemy database session

-         """

-         if conf.system == "koji":

-             stale_date = datetime.utcnow() - timedelta(days=conf.cleanup_failed_builds_time)

-             stale_module_builds = (

-                 db_session.query(models.ModuleBuild)

-                 .filter(

-                     models.ModuleBuild.state == models.BUILD_STATES["failed"],

-                     models.ModuleBuild.time_modified <= stale_date,

-                 )

-                 .all()

-             )

-             if stale_module_builds:

-                 log.info(

-                     "{0} stale failed module build(s) will be cleaned up".format(

-                         len(stale_module_builds))

-                 )

-             for module in stale_module_builds:

-                 log.info("{0!r} is stale and is being cleaned up".format(module))

-                 # Find completed artifacts in the stale build

-                 artifacts = [c for c in module.component_builds if c.is_completed]

-                 # If there are no completed artifacts, then there is nothing to tag

-                 if artifacts:

-                     # Set buildroot_connect=False so it doesn't recreate the Koji target and etc.

-                     builder = GenericBuilder.create_from_module(

-                         db_session, module, conf, buildroot_connect=False

-                     )

-                     builder.untag_artifacts([c.nvr for c in artifacts])

-                     # Mark the artifacts as untagged in the database

-                     for c in artifacts:

-                         c.tagged = False

-                         c.tagged_in_final = False

-                         db_session.add(c)

-                 state_reason = (

-                     "The module was garbage collected since it has failed over {0}"

-                     " day(s) ago".format(conf.cleanup_failed_builds_time)

-                 )

-                 module.transition(

-                     db_session,

-                     conf,

-                     models.BUILD_STATES["garbage"],

-                     state_reason=state_reason,

-                     failure_type="user",

-                 )

-                 db_session.add(module)

-                 db_session.commit()

- 

-     def log_summary(self):

-         log.info("Current status:")

-         consumer = module_build_service.scheduler.consumer.get_global_consumer()

-         backlog = consumer.incoming.qsize()

-         log.info("  * internal queue backlog is {0}".format(backlog))

-         states = sorted(models.BUILD_STATES.items(), key=operator.itemgetter(1))

-         for name, code in states:

-             query = db_session.query(models.ModuleBuild).filter_by(state=code)

-             count = query.count()

-             if count:

-                 log.info("  * {0} module builds in the {1} state".format(count, name))

-             if name == "build":

-                 for module_build in query.all():

-                     log.info("    * {0!r}".format(module_build))

-                     # First batch is number '1'.

-                     for i in range(1, module_build.batch + 1):

-                         n = len([c for c in module_build.component_builds if c.batch == i])

-                         log.info("      * {0} components in batch {1}".format(n, i))

- 

-     def _nudge_module_builds_in_state(self, state_name, older_than_minutes):

-         """

-         Finds all the module builds in the `state` with `time_modified` older

-         than `older_than_minutes` and adds fake MBSModule message to the

-         work queue.

-         """

-         log.info("Looking for module builds stuck in the %s state", state_name)

-         builds = models.ModuleBuild.by_state(db_session, state_name)

-         log.info(" %r module builds in the %s state...", len(builds), state_name)

-         now = datetime.utcnow()

-         time_modified_threshold = timedelta(minutes=older_than_minutes)

-         for build in builds:

- 

-             # Only give builds a nudge if stuck for more than ten minutes

-             if (now - build.time_modified) < time_modified_threshold:

-                 continue

- 

-             # Pretend the build is modified, so we don't tight spin.

-             build.time_modified = now

  

-             # Fake a message to kickstart the build anew in the consumer

-             state = module_build_service.models.BUILD_STATES[state_name]

-             msg = module_build_service.messaging.MBSModule(

-                 "nudge_module_builds_fake_message", build.id, state)

-             log.info("  Scheduling faked event %r" % msg)

-             module_build_service.scheduler.consumer.work_queue_put(msg)

+ def process_open_component_builds():

+     log.warning("process_open_component_builds is not yet implemented...")

  

-         db_session.commit()

  

-     def process_waiting_module_builds(self):

-         for state in ["init", "wait"]:

-             self._nudge_module_builds_in_state(state, 10)

+ @celery_app.task

+ def fail_lost_builds():

+     # This function is only supposed to handle the cases which can't be

+     # updated through messaging (e.g. srpm-build failures). Please keep it

+     # fit `n` slim. We do want the rest to be processed elsewhere.

+     # TODO re-use

  

-     def process_open_component_builds(self):

-         log.warning("process_open_component_builds is not yet implemented...")

+     if conf.system == "koji":

+         # We don't do this on behalf of users

+         koji_session = KojiModuleBuilder.get_session(conf, login=False)

+         log.info("Querying tasks for statuses:")

+         res = db_session.query(models.ComponentBuild).filter_by(

+             state=koji.BUILD_STATES["BUILDING"]

+         ).options(lazyload("module_build")).all()

+ 

+         log.info("Checking status for %s tasks", len(res))

+         for component_build in res:

+             log.debug(component_build.json(db_session))

+             # Don't check tasks which haven't been triggered yet

+             if not component_build.task_id:

+                 continue

  

-     def process_paused_module_builds(self, config):

-         log.info("Looking for paused module builds in the build state")

-         if module_build_service.utils.at_concurrent_component_threshold(config):

-             log.debug(

-                 "Will not attempt to start paused module builds due to "

-                 "the concurrent build threshold being met"

-             )

-             return

- 

-         ten_minutes = timedelta(minutes=10)

-         # Check for module builds that are in the build state but don't have any active component

-         # builds. Exclude module builds in batch 0. This is likely a build of a module without

-         # components.

-         module_builds = (

-             db_session.query(models.ModuleBuild)

-             .filter(

-                 models.ModuleBuild.state == models.BUILD_STATES["build"],

-                 models.ModuleBuild.batch > 0,

-             )

-             .all()

-         )

-         for module_build in module_builds:

-             now = datetime.utcnow()

-             # Only give builds a nudge if stuck for more than ten minutes

-             if (now - module_build.time_modified) < ten_minutes:

+             # Don't check tasks for components which have been reused,

+             # they may have BUILDING state temporarily before we tag them

+             # to new module tag. Checking them would be waste of resources.

+             if component_build.reused_component_id:

+                 log.debug(

+                     'Skipping check for task "%s", the component has been reused ("%s").',

+                     component_build.task_id, component_build.reused_component_id

+                 )

                  continue

-             # If there are no components in the build state on the module build,

-             # then no possible event will start off new component builds.

-             # But do not try to start new builds when we are waiting for the

-             # repo-regen.

-             if not module_build.current_batch(koji.BUILD_STATES["BUILDING"]):

-                 # Initialize the builder...

-                 builder = GenericBuilder.create_from_module(db_session, module_build, config)

- 

-                 if _has_missed_new_repo_message(module_build, builder.koji_session):

-                     log.info("  Processing the paused module build %r", module_build)

-                     further_work = module_build_service.utils.start_next_batch_build(

-                         config, module_build, builder)

-                     for event in further_work:

-                         log.info("  Scheduling faked event %r" % event)

-                         module_build_service.scheduler.consumer.work_queue_put(event)

- 

-             # Check if we have met the threshold.

-             if module_build_service.utils.at_concurrent_component_threshold(config, db_session):

-                 break

  

-     def retrigger_new_repo_on_failure(self, config):

-         """

-         Retrigger failed new repo tasks for module builds in the build state.

+             task_id = component_build.task_id

+ 

+             log.info('Checking status of task_id "%s"', task_id)

+             task_info = koji_session.getTaskInfo(task_id)

+ 

+             state_mapping = {

+                 # Cancelled and failed builds should be marked as failed.

+                 koji.TASK_STATES["CANCELED"]: koji.BUILD_STATES["FAILED"],

+                 koji.TASK_STATES["FAILED"]: koji.BUILD_STATES["FAILED"],

+                 # Completed tasks should be marked as complete.

+                 koji.TASK_STATES["CLOSED"]: koji.BUILD_STATES["COMPLETE"],

+             }

+ 

+             # If it is a closed/completed task, then we can extract the NVR

+             build_version, build_release = None, None  # defaults

+             if task_info["state"] == koji.TASK_STATES["CLOSED"]:

+                 builds = koji_session.listBuilds(taskID=task_id)

+                 if not builds:

+                     log.warning(

+                         "Task ID %r is closed, but we found no builds in koji.", task_id)

+                 elif len(builds) > 1:

+                     log.warning(

+                         "Task ID %r is closed, but more than one build is present!", task_id)

+                 else:

+                     build_version = builds[0]["version"]

+                     build_release = builds[0]["release"]

+ 

+             log.info("  task %r is in state %r", task_id, task_info["state"])

+             if task_info["state"] in state_mapping:

+                 build_task_finalize.delay(

+                     msg_id="producer::fail_lost_builds fake msg",

+                     build_id=component_build.task_id,

+                     task_id=component_build.task_id,

+                     build_new_state=state_mapping[task_info["state"]],

+                     build_name=component_build.package,

+                     build_release=build_release,

+                     build_version=build_version,

+                 )

  

-         The newRepo task may fail for various reasons outside the scope of MBS.

-         This method will detect this scenario and retrigger the newRepo task

-         if needed to avoid the module build from being stuck in the "build" state.

-         """

-         if config.system != "koji":

-             return

+     elif conf.system == "mock":

+         pass

  

-         koji_session = module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session(

-             config)

  

-         for module_build in (

-             db_session.query(models.ModuleBuild).filter_by(state=models.BUILD_STATES["build"]).all()

-         ):

-             if not module_build.new_repo_task_id:

-                 continue

+ @celery_app.task

+ def process_paused_module_builds():

+     log.info("Looking for paused module builds in the build state")

+     if module_build_service.utils.at_concurrent_component_threshold(conf):

+         log.debug(

+             "Will not attempt to start paused module builds due to "

+             "the concurrent build threshold being met"

+         )

+         return

+ 

+     ten_minutes = timedelta(minutes=10)

+     # Check for module builds that are in the build state but don't have any active component

+     # builds. Exclude module builds in batch 0. This is likely a build of a module without

+     # components.

+     module_builds = db_session.query(models.ModuleBuild).filter(

+         models.ModuleBuild.state == models.BUILD_STATES["build"],

+         models.ModuleBuild.batch > 0,

+     ).all()

+     for module_build in module_builds:

+         now = datetime.utcnow()

+         # Only give builds a nudge if stuck for more than ten minutes

+         if (now - module_build.time_modified) < ten_minutes:

+             continue

+         # If there are no components in the build state on the module build,

+         # then no possible event will start off new component builds.

+         # But do not try to start new builds when we are waiting for the

+         # repo-regen.

+         if not module_build.current_batch(koji.BUILD_STATES["BUILDING"]):

+             # Initialize the builder...

+             builder = GenericBuilder.create_from_module(

+                 db_session, module_build, conf)

+ 

+             if has_missed_new_repo_message(module_build, builder.koji_session):

+                 log.info("  Processing the paused module build %r", module_build)

+                 module_build_service.utils.start_next_batch_build(

+                     conf, module_build, builder)

+ 

+         # Check if we have met the threshold.

+         if module_build_service.utils.at_concurrent_component_threshold(conf):

+             break

+ 

+ 

+ @celery_app.task

+ def retrigger_new_repo_on_failure():

+     """

+     Retrigger failed new repo tasks for module builds in the build state.

  

-             task_info = koji_session.getTaskInfo(module_build.new_repo_task_id)

-             if task_info["state"] in [koji.TASK_STATES["CANCELED"], koji.TASK_STATES["FAILED"]]:

-                 log.info(

-                     "newRepo task %s for %r failed, starting another one",

-                     str(module_build.new_repo_task_id), module_build,

-                 )

-                 taginfo = koji_session.getTag(module_build.koji_tag + "-build")

-                 module_build.new_repo_task_id = koji_session.newRepo(taginfo["name"])

+     The newRepo task may fail for various reasons outside the scope of MBS.

+     This method will detect this scenario and retrigger the newRepo task

+     if needed to avoid the module build from being stuck in the "build" state.

+     """

+     if conf.system != "koji":

+         return

+ 

+     koji_session = KojiModuleBuilder.get_session(conf)

+     module_builds = db_session.query(models.ModuleBuild).filter(

+         models.ModuleBuild.state == models.BUILD_STATES["build"],

+         models.ModuleBuild.new_repo_task_id.isnot(None),

+     ).all()

+ 

+     for module_build in module_builds:

+         task_info = koji_session.getTaskInfo(module_build.new_repo_task_id)

+         if task_info["state"] in [koji.TASK_STATES["CANCELED"], koji.TASK_STATES["FAILED"]]:

+             log.info(

+                 "newRepo task %s for %r failed, starting another one",

+                 str(module_build.new_repo_task_id), module_build,

+             )

+             taginfo = koji_session.getTag(module_build.koji_tag + "-build")

+             module_build.new_repo_task_id = koji_session.newRepo(taginfo["name"])

+ 

+     db_session.commit()

  

-         db_session.commit()

  

-     def delete_old_koji_targets(self, config):

-         """

-         Deletes targets older than `config.koji_target_delete_time` seconds

-         from Koji to cleanup after the module builds.

-         """

-         if config.system != "koji":

-             return

+ @celery_app.task

+ def delete_old_koji_targets():

+     """

+     Deletes targets older than `conf.koji_target_delete_time` seconds

+     from Koji to clean up after the module builds.

+     """

+     if conf.system != "koji":

+         return

+ 

+     log.info("Looking for module builds which Koji target can be removed")

+ 

+     now = datetime.utcnow()

+ 

+     koji_session = KojiModuleBuilder.get_session(conf)

+     for target in koji_session.getBuildTargets():
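
+         # Look up a non-base module build owning this target's destination

+         # tag that is no longer in progress; only time_completed is needed.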

+         module = db_session.query(models.ModuleBuild).filter(

+             models.ModuleBuild.koji_tag == target["dest_tag_name"],

+             models.ModuleBuild.name.notin_(conf.base_module_names),

+             models.ModuleBuild.state.notin_([

+                 models.BUILD_STATES["init"],

+                 models.BUILD_STATES["wait"],

+                 models.BUILD_STATES["build"],

+             ]),

+         ).options(

+             load_only("time_completed"),

+         ).first()

+ 

+         if module is None:

+             continue

+ 

+         # Double-check that the target we are going to remove is prefixed

+         # by our prefix, so we won't remove f26 when there is some garbage

+         # in DB or Koji.

+         for allowed_prefix in conf.koji_tag_prefixes:

+             if target["name"].startswith(allowed_prefix + "-"):

+                 break

+         else:

+             log.error("Module %r has Koji target with not allowed prefix.", module)

+             continue

  

-         log.info("Looking for module builds which Koji target can be removed")

+         delta = now - module.time_completed

+         if delta.total_seconds() > conf.koji_target_delete_time:

+             log.info("Removing target of module %r", module)

+             koji_session.deleteBuildTarget(target["id"])

  

-         now = datetime.utcnow()

  

-         koji_session = KojiModuleBuilder.get_session(config)

-         for target in koji_session.getBuildTargets():

-             koji_tag = target["dest_tag_name"]

-             module = db_session.query(models.ModuleBuild).filter_by(koji_tag=koji_tag).first()

-             if (

-                 not module

-                 or module.name in conf.base_module_names

-                 or module.state in [

-                     models.BUILD_STATES["init"],

-                     models.BUILD_STATES["wait"],

-                     models.BUILD_STATES["build"],

-                 ]

-             ):

-                 continue

+ @celery_app.task

+ def cleanup_stale_failed_builds():

+     """Does various clean up tasks on stale failed module builds"""

  

-             # Double-check that the target we are going to remove is prefixed

-             # by our prefix, so we won't remove f26 when there is some garbage

-             # in DB or Koji.

-             for allowed_prefix in config.koji_tag_prefixes:

-                 if target["name"].startswith(allowed_prefix + "-"):

-                     break

-             else:

-                 log.error("Module %r has Koji target with not allowed prefix.", module)

-                 continue

+     if conf.system != "koji":

+         return

  

-             delta = now - module.time_completed

-             if delta.total_seconds() > config.koji_target_delete_time:

-                 log.info("Removing target of module %r", module)

-                 koji_session.deleteBuildTarget(target["id"])

- 

-     def cancel_stuck_module_builds(self, config):

-         """

-         Method transitions builds which are stuck in one state too long to the "failed" state.

-         The states are defined with the "cleanup_stuck_builds_states" config option and the

-         time is defined by the "cleanup_stuck_builds_time" config option.

-         """

+     stale_date = datetime.utcnow() - timedelta(days=conf.cleanup_failed_builds_time)

+     stale_module_builds = db_session.query(models.ModuleBuild).filter(

+         models.ModuleBuild.state == models.BUILD_STATES["failed"],

+         models.ModuleBuild.time_modified <= stale_date,

+     ).all()

+     if stale_module_builds:

          log.info(

-             'Looking for module builds stuck in the states "{states}" more than {days} days'

-             .format(

-                 states=" and ".join(config.cleanup_stuck_builds_states),

-                 days=config.cleanup_stuck_builds_time,

-             )

+             "%s stale failed module build(s) will be cleaned up",

+             len(stale_module_builds)

          )

- 

-         delta = timedelta(days=config.cleanup_stuck_builds_time)

-         now = datetime.utcnow()

-         threshold = now - delta

-         states = [

-             module_build_service.models.BUILD_STATES[state]

-             for state in config.cleanup_stuck_builds_states

-         ]

- 

-         module_builds = (

-             db_session.query(models.ModuleBuild)

-             .filter(

-                 models.ModuleBuild.state.in_(states), models.ModuleBuild.time_modified < threshold

+     for module in stale_module_builds:

+         log.info("%r is stale and is being cleaned up", module)

+         # Find completed artifacts in the stale build

+         artifacts = [c for c in module.component_builds if c.is_completed]

+         # If there are no completed artifacts, then there is nothing to tag

+         if artifacts:

+             # Set buildroot_connect=False so it doesn't recreate the Koji target and etc.

+             builder = GenericBuilder.create_from_module(

+                 db_session, module, conf, buildroot_connect=False

              )

-             .all()

+             builder.untag_artifacts([c.nvr for c in artifacts])

+             # Mark the artifacts as untagged in the database

+             for c in artifacts:

+                 c.tagged = False

+                 c.tagged_in_final = False

+                 db_session.add(c)

+         state_reason = (

+             "The module was garbage collected since it has failed over {0}"

+             " day(s) ago".format(conf.cleanup_failed_builds_time)

          )

+         module.transition(

+             db_session,

+             conf,

+             models.BUILD_STATES["garbage"],

+             state_reason=state_reason,

+             failure_type="user",

+         )

+         db_session.add(module)

+         db_session.commit()

  

-         log.info(" {0!r} module builds are stuck...".format(len(module_builds)))

  

-         for build in module_builds:

-             nsvc = ":".join([build.name, build.stream, build.version, build.context])

-             log.info('Transitioning build "{nsvc}" to "Failed" state.'.format(nsvc=nsvc))

+ @celery_app.task

+ def cancel_stuck_module_builds():

+     """

+     Transitions builds which have been stuck in one state for too long to the "failed" state.

+     The states are defined with the "cleanup_stuck_builds_states" config option and the

+     time is defined by the "cleanup_stuck_builds_time" config option.

+     """

+     log.info(

+         'Looking for module builds stuck in the states "%s" for more than %s days',

+         " and ".join(conf.cleanup_stuck_builds_states),

+         conf.cleanup_stuck_builds_time,

+     )

+ 

+     threshold = datetime.utcnow() - timedelta(days=conf.cleanup_stuck_builds_time)
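
+     # Translate the configured state names into their numeric codes.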

+     states = [

+         module_build_service.models.BUILD_STATES[state]

+         for state in conf.cleanup_stuck_builds_states

+     ]

+ 

+     module_builds = db_session.query(models.ModuleBuild).filter(

+         models.ModuleBuild.state.in_(states),

+         models.ModuleBuild.time_modified < threshold

+     ).all()

+ 

+     log.info(" %s module builds are stuck...", len(module_builds))

+ 

+     for build in module_builds:

+         log.info(

+             'Transitioning build "%s:%s:%s:%s" to "Failed" state.',

+             build.name, build.stream, build.version, build.context

+         )

+         state_reason = "The module was in {} for more than {} days".format(

+             build.state, conf.cleanup_stuck_builds_time

+         )

+         build.transition(

+             db_session,

+             conf,

+             state=models.BUILD_STATES["failed"],

+             state_reason=state_reason,

+             failure_type="user",

+         )

+         db_session.commit()

  

-             state_reason = "The module was in {state} for more than {days} days".format(

-                 state=build.state, days=config.cleanup_stuck_builds_time

-             )

-             build.transition(

-                 db_session,

-                 config,

-                 state=models.BUILD_STATES["failed"],

-                 state_reason=state_reason,

-                 failure_type="user",

-             )

-             db_session.commit()

  

-     def sync_koji_build_tags(self, config):

-         """

-         Method checking the "tagged" and "tagged_in_final" attributes of

-         "complete" ComponentBuilds in the current batch of module builds

-         in "building" state against the Koji.

+ @celery_app.task

+ def sync_koji_build_tags():

+     """

+     Method checking the "tagged" and "tagged_in_final" attributes of

+     "complete" ComponentBuilds in the current batch of module builds

+     in "building" state against the Koji.

  

-         In case the Koji shows the build as tagged/tagged_in_final,

-         fake "tagged" message is added to work queue.

-         """

-         if conf.system != "koji":

-             return

+     In case Koji shows the build as tagged/tagged_in_final,

+     a fake "tagged" message is scheduled.

+     """

+     if conf.system != "koji":

+         return

+ 

+     koji_session = KojiModuleBuilder.get_session(conf, login=False)

+ 
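
+     # Only check builds which have not been modified in the last ten minutes.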

+     threshold = datetime.utcnow() - timedelta(minutes=10)

+     module_builds = db_session.query(models.ModuleBuild).filter(

+         models.ModuleBuild.time_modified < threshold,

+         models.ModuleBuild.state == models.BUILD_STATES["build"]

+     ).all()

+     for module_build in module_builds:

+         complete_components = module_build.current_batch(koji.BUILD_STATES["COMPLETE"])

+         for c in complete_components:

+             # In case the component is tagged in the build tag and

+             # also tagged in the final tag (or it is build_time_only

+             # and therefore should not be tagged in final tag), skip it.

+             if c.tagged and (c.tagged_in_final or c.build_time_only):

+                 continue

  

-         koji_session = KojiModuleBuilder.get_session(conf, login=False)

+             log.info(

+                 "%r: Component %r is complete, but not tagged in the "

+                 "final and/or build tags.",

+                 module_build, c,

+             )

  

-         threshold = datetime.utcnow() - timedelta(minutes=10)

-         module_builds = db_session.query(models.ModuleBuild).filter(

-             models.ModuleBuild.time_modified < threshold,

-             models.ModuleBuild.state == models.BUILD_STATES["build"]

-         ).all()

-         for module_build in module_builds:

-             complete_components = module_build.current_batch(koji.BUILD_STATES["COMPLETE"])

-             for c in complete_components:

-                 # In case the component is tagged in the build tag and

-                 # also tagged in the final tag (or it is build_time_only

-                 # and therefore should not be tagged in final tag), skip it.

-                 if c.tagged and (c.tagged_in_final or c.build_time_only):

-                     continue

+             # Check in which tags the component is tagged.

+             tag_dicts = koji_session.listTags(c.nvr)

+             tags = [tag_dict["name"] for tag_dict in tag_dicts]

  

+             # If it is tagged in final tag, but MBS does not think so,

+             # schedule fake message.

+             if not c.tagged_in_final and module_build.koji_tag in tags:

                  log.info(

-                     "%r: Component %r is complete, but not tagged in the "

-                     "final and/or build tags.",

-                     module_build, c,

-                 )

- 

-                 # Check in which tags the component is tagged.

-                 tag_dicts = koji_session.listTags(c.nvr)

-                 tags = [tag_dict["name"] for tag_dict in tag_dicts]

- 

-                 # If it is tagged in final tag, but MBS does not think so,

-                 # schedule fake message.

-                 if not c.tagged_in_final and module_build.koji_tag in tags:

-                     msg = module_build_service.messaging.KojiTagChange(

-                         "sync_koji_build_tags_fake_message", module_build.koji_tag, c.package, c.nvr

-                     )

-                     log.info("  Scheduling faked event %r" % msg)

-                     module_build_service.scheduler.consumer.work_queue_put(msg)

- 

-                 # If it is tagged in the build tag, but MBS does not think so,

-                 # schedule fake message.

-                 build_tag = module_build.koji_tag + "-build"

-                 if not c.tagged and build_tag in tags:

-                     msg = module_build_service.messaging.KojiTagChange(

-                         "sync_koji_build_tags_fake_message", build_tag, c.package, c.nvr)

-                     log.info("  Scheduling faked event %r" % msg)

-                     module_build_service.scheduler.consumer.work_queue_put(msg)

- 

-     def poll_greenwave(self, config):

-         """

-         Polls Greenwave for all builds in done state

-         :param db_session: SQLAlchemy DB session

-         :return: None

-         """

-         if greenwave is None:

-             return

- 

-         module_builds = (

-             db_session.query(models.ModuleBuild)

-             .filter_by(state=models.BUILD_STATES["done"], scratch=False).all()

-         )

- 

-         log.info("Checking Greenwave for %d builds", len(module_builds))

- 

-         for build in module_builds:

-             if greenwave.check_gating(build):

-                 build.transition(db_session, config, state=models.BUILD_STATES["ready"])

-             else:

-                 build.state_reason = "Gating failed (MBS will retry in {0} seconds)".format(

-                     conf.polling_interval

-                 )

-                 if greenwave.error_occurred:

-                     build.state_reason += " (Error occured while querying Greenwave)"

-                 build.time_modified = datetime.utcnow()

-             db_session.commit()

+                     "Apply tag %s to module build %r",

+                     module_build.koji_tag, module_build)

+                 tagged.delay(

+                     "internal:sync_koji_build_tags",

+                     module_build.koji_tag, c.package, c.nvr)

+ 

+             # If it is tagged in the build tag, but MBS does not think so,

+             # schedule fake message.

+             build_tag = module_build.koji_tag + "-build"

+             if not c.tagged and build_tag in tags:

+                 log.info(

+                     "Apply build tag %s to module build %r",

+                     build_tag, module_build)

+                 tagged.delay(

+                     "internal:sync_koji_build_tags",

+                     build_tag, c.package, c.nvr)

+ 

+ 

+ @celery_app.task

+ def poll_greenwave():

+     """Polls Greenwave for all builds in done state"""

+     if greenwave is None:

+         return

+ 

+     module_builds = db_session.query(models.ModuleBuild).filter_by(

+         state=models.BUILD_STATES["done"],

+         scratch=False

+     ).all()

+ 

+     log.info("Checking Greenwave for %d builds", len(module_builds))

+ 

+     for build in module_builds:

+         if greenwave.check_gating(build):

+             build.transition(db_session, conf, state=models.BUILD_STATES["ready"])

+         else:

+             build.state_reason = "Gating failed (MBS will retry in {0} seconds)".format(

+                 conf.polling_interval

+             )

+             if greenwave.error_occurred:

+                 build.state_reason += " (Error occurred while querying Greenwave)"

+             build.time_modified = datetime.utcnow()

+         db_session.commit()

  

  

- def _has_missed_new_repo_message(module_build, koji_session):

+ def has_missed_new_repo_message(module_build, koji_session):

      """

      Returns whether or not a new repo message has probably been missed.

      """
@@ -489,7 +473,8 @@ 

          # message so module build can recover.

          return True

      log.debug(

-         'Checking status of newRepo task "%d" for %s', module_build.new_repo_task_id, module_build)

+         'Checking status of newRepo task "%d" for %s',

+         module_build.new_repo_task_id, module_build)

      task_info = koji_session.getTaskInfo(module_build.new_repo_task_id)

      # Other final states, FAILED and CANCELED, are handled by retrigger_new_repo_on_failure

      return task_info["state"] == koji.TASK_STATES["CLOSED"]

@@ -4,8 +4,8 @@ 

  import concurrent.futures

  

  from module_build_service import conf, log, models

- import module_build_service.messaging

  from module_build_service.db_session import db_session

+ from module_build_service.scheduler import events

  from .reuse import get_reusable_components, reuse_component

  

  
@@ -104,7 +104,6 @@ 

      # Get the list of components to be built in this batch. We are not building

      # all `unbuilt_components`, because we can meet the num_concurrent_builds

      # threshold

-     further_work = []

      components_to_build = []

      # Sort the unbuilt_components so that the components that take the longest to build are

      # first
@@ -115,8 +114,7 @@ 

          # Only evaluate new components

          if not component.is_waiting_for_build:

              continue

-         msgs = builder.recover_orphaned_artifact(component)

-         further_work += msgs

+         builder.recover_orphaned_artifact(component)

  

      for c in unbuilt_components:

          # If a previous build of the component was found, then the state will be marked as
@@ -149,7 +147,6 @@ 

              future.result()

  

      db_session.commit()

-     return further_work

  

  

  def start_next_batch_build(config, module, builder, components=None):
@@ -248,7 +245,6 @@ 

      log.info("Starting build of next batch %d, %s" % (module.batch, unbuilt_components))

  

      # Attempt to reuse any components possible in the batch before attempting to build any

-     further_work = []

      unbuilt_components_after_reuse = []

      components_reused = False

      should_try_reuse = True
@@ -264,7 +260,7 @@ 

          for c, reusable_c in zip(unbuilt_components, reusable_components):

              if reusable_c:

                  components_reused = True

-                 further_work += reuse_component(c, reusable_c)

+                 reuse_component(c, reusable_c)

              else:

                  unbuilt_components_after_reuse.append(c)

          # Commit the changes done by reuse_component
@@ -274,11 +270,9 @@ 

      # If all the components were reused in the batch then make a KojiRepoChange

      # message and return

      if components_reused and not unbuilt_components_after_reuse:

-         further_work.append(

-             module_build_service.messaging.KojiRepoChange(

-                 "start_build_batch: fake msg", builder.module_build_tag["name"])

-         )

-         return further_work

+         from module_build_service.scheduler.handlers.repos import done as repos_done_handler

+         events.scheduler.add(

+             repos_done_handler, ("start_build_batch: fake_msg", builder.module_build_tag["name"]))

+         return

  

-     return further_work + continue_batch_build(

-         config, module, builder, unbuilt_components_after_reuse)

+     continue_batch_build(config, module, builder, unbuilt_components_after_reuse)

@@ -646,7 +646,8 @@ 

      :return list of architectures

      """

      # Imported here to allow import of utils in GenericBuilder.

-     import module_build_service.builder

+     from module_build_service.builder import GenericBuilder

+ 

      nsvc = mmd.get_nsvc()

  

      # At first, handle BASE_MODULE_ARCHES - this overrides any other option.
@@ -698,8 +699,7 @@ 

              )

              if not module_obj:

                  continue

-             arches = module_build_service.builder.GenericBuilder.get_module_build_arches(

-                 module_obj)

+             arches = GenericBuilder.get_module_build_arches(module_obj)

              if arches:

                  log.info("Setting build arches of %s to %r based on the buildrequired "

                           "module %r." % (nsvc, arches, module_obj))

@@ -2,14 +2,15 @@ 

  # SPDX-License-Identifier: MIT

  import kobo.rpmlib

  

- import module_build_service.messaging

  from module_build_service import log, models, conf

  from module_build_service.db_session import db_session

- from module_build_service.utils.mse import get_base_module_mmds

  from module_build_service.resolver import GenericResolver

+ from module_build_service.scheduler import events

+ from module_build_service.utils.mse import get_base_module_mmds

  

  

- def reuse_component(component, previous_component_build, change_state_now=False):

+ def reuse_component(component, previous_component_build, change_state_now=False,

+                     schedule_fake_events=True):

      """

      Reuses component build `previous_component_build` instead of building

      component `component`
@@ -18,11 +19,17 @@ 

      This allows callers to reuse multiple component builds and commit them all

      at once.

  

-     Returns the list of BaseMessage instances to be handled later by the

-     scheduler.

+     :param ComponentBuild component: Component which will reuse the previous component build.

+     :param ComponentBuild previous_component_build: Previous component build to reuse.

+     :param bool change_state_now: When True, the component.state will be set to

+         previous_component_build.state. Otherwise, the component.state will be set to BUILDING.

+     :param bool schedule_fake_events: When True, `events.scheduler.add` is used to

+         schedule a handlers.components.build_task_finalize handler call.

      """

  

      import koji

+     from module_build_service.scheduler.handlers.components import (

+         build_task_finalize as build_task_finalize_handler)

  

      log.info(

          'Reusing component "{0}" from a previous module '
@@ -34,29 +41,22 @@ 

          component.state = previous_component_build.state

      else:

          # Use BUILDING state here, because we want the state to change to

-         # COMPLETE by the fake KojiBuildChange message we are generating

-         # few lines below. If we would set it to the right state right

-         # here, we would miss the code path handling the KojiBuildChange

-         # which works only when switching from BUILDING to COMPLETE.

+         # COMPLETE by scheduling an internal buildsys.build.state.change message

+         # we are generating a few lines below.

+         # If we set it to the right state right here, we would miss the

+         # code path handling that event, which works only when switching from

+         # BUILDING to COMPLETE.

          component.state = koji.BUILD_STATES["BUILDING"]

      component.state_reason = "Reused component from previous module build"

      component.nvr = previous_component_build.nvr

      nvr_dict = kobo.rpmlib.parse_nvr(component.nvr)

-     # Add this message to further_work so that the reused

-     # component will be tagged properly

-     return [

-         module_build_service.messaging.KojiBuildChange(

-             msg_id="reuse_component: fake msg",

-             build_id=None,

-             task_id=component.task_id,

-             build_new_state=previous_component_build.state,

-             build_name=nvr_dict["name"],

-             build_version=nvr_dict["version"],

-             build_release=nvr_dict["release"],

-             module_build_id=component.module_id,

-             state_reason=component.state_reason,

-         )

-     ]

+     # Add this event to the scheduler so that the reused component will be tagged properly.

+     if schedule_fake_events:
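
+         # args: msg_id, build_id, task_id, build_new_state, build_name,

+         # build_version, build_release, module_build_id, state_reason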

+         args = (

+             "reuse_component: fake msg", None, component.task_id, previous_component_build.state,

+             nvr_dict["name"], nvr_dict["version"], nvr_dict["release"], component.module_id,

+             component.state_reason)

+         events.scheduler.add(build_task_finalize_handler, args)

  

  

  def get_reusable_module(module):
@@ -214,7 +214,7 @@ 

              module.batch = c.batch

  

          # Reuse the component

-         reuse_component(c, component_to_reuse, True)

+         reuse_component(c, component_to_reuse, True, False)

          components_to_tag.append(c.nvr)

  

      # Tag them
@@ -403,8 +403,8 @@ 

          # Create separate lists for the new and previous module build. These lists

          # will have an entry for every build batch *before* the component's

          # batch except for 1, which is reserved for the module-build-macros RPM.

-         # Each batch entry will contain a set of "(name, ref)" with the name and

-         # ref (commit) of the component.

+         # Each batch entry will contain a set of "(name, ref, arches)" with the name,

+         # ref (commit), and arches of the component.

          for i in range(new_module_build_component.batch - 1):

              # This is the first batch which we want to skip since it will always

              # contain only the module-build-macros RPM and it gets built every time
@@ -412,37 +412,34 @@ 

                  continue

  

              new_module_build_components.append({

-                 (value.package, value.ref)

+                 (value.package, value.ref,

+                     tuple(sorted(mmd.get_rpm_component(value.package).get_arches())))

                  for value in new_component_builds

                  if value.batch == i + 1

              })

  

              previous_module_build_components.append({

-                 (value.package, value.ref)

+                 (value.package, value.ref,

+                     tuple(sorted(old_mmd.get_rpm_component(value.package).get_arches())))

                  for value in prev_component_builds

                  if value.batch == i + 1

              })

  

-         # If the previous batches don't have the same ordering and hashes, then the

+         # If the previous batches don't have the same ordering, hashes, and arches, then the

          # component can't be reused

          if previous_module_build_components != new_module_build_components:

              message = ("Cannot reuse the component because a component in a previous"

-                        " batch has been rebuilt")

+                        " batch has been added, removed, or rebuilt")

              new_module_build_component.log_message(db_session, message)

              return None

  

-     for pkg_name in mmd.get_rpm_component_names():

-         pkg = mmd.get_rpm_component(pkg_name)

-         if pkg_name not in old_mmd.get_rpm_component_names():

-             message = ("Cannot reuse the component because a component was added or "

-                        "removed since the compatible module")

-             new_module_build_component.log_message(db_session, message)

-             return None

-         if set(pkg.get_arches()) != set(old_mmd.get_rpm_component(pkg_name).get_arches()):

-             message = ("Cannot reuse the component because the architectures for the package {}"

-                        " have changed since the compatible module build").format(pkg_name)

-             new_module_build_component.log_message(db_session, message)

-             return None

+     # check that arches have not changed

+     pkg = mmd.get_rpm_component(component_name)

+     if set(pkg.get_arches()) != set(old_mmd.get_rpm_component(component_name).get_arches()):

+         message = ("Cannot reuse the component because its architectures"

+                    " have changed since the compatible module build").format(component_name)

+         new_module_build_component.log_message(db_session, message)

+         return None

  

      reusable_component = db_session.query(models.ComponentBuild).filter_by(

          package=component_name, module_id=previous_module_build.id).one()

@@ -394,7 +394,7 @@ 

      mmd, module, initial_batch=1, previous_buildorder=None, main_mmd=None

  ):

      # Imported here to allow import of utils in GenericBuilder.

-     import module_build_service.builder

+     from module_build_service.builder import GenericBuilder

  

      # When main_mmd is set, merge the metadata from this mmd to main_mmd,

      # otherwise our current mmd is main_mmd.
@@ -436,7 +436,7 @@ 

      # Get map of packages that have SRPM overrides

      srpm_overrides = get_module_srpm_overrides(module)

  

-     rpm_weights = module_build_service.builder.GenericBuilder.get_build_weights(

+     rpm_weights = GenericBuilder.get_build_weights(

          [c.get_name() for c in rpm_components]

      )

      all_components.sort(key=lambda x: x.get_buildorder())

file modified
+21 -8
@@ -206,15 +206,28 @@ 

              log.error("Invalid JSON submitted")

              raise ValidationError("Invalid JSON submitted")

  

-         if module.state == models.BUILD_STATES["failed"]:

-             raise Forbidden("You can't cancel a failed module")

+         state = r["state"]

+         valid_input_states = ("failed", str(models.BUILD_STATES["failed"]))

+         if state not in valid_input_states:

+             raise ValidationError(

+                 "An invalid state was submitted. Valid states values are: {}"

+                 .format(", ".join(valid_input_states))

+             )

  

-         if r["state"] == "failed" or r["state"] == str(models.BUILD_STATES["failed"]):

-             module.transition(

-                 db.session, conf, models.BUILD_STATES["failed"], "Canceled by %s." % username)

-         else:

-             log.error('The provided state change of "{}" is not supported'.format(r["state"]))

-             raise ValidationError("The provided state change is not supported")

+         valid_states_to_cancel = ("build", "init", "wait")

+         module_state_name = models.INVERSE_BUILD_STATES[module.state]

+         if module_state_name not in valid_states_to_cancel:

+             log.error(

+                 "The user %s attempted to cancel a build in the %s state",

+                 username, module_state_name,

+             )

+             raise ValidationError(

+                 "To cancel a module build, it must be in one of the following states: {}"

+                 .format(", ".join(valid_states_to_cancel))

+             )

+ 

+         module.transition(

+             db.session, conf, models.BUILD_STATES["failed"], "Canceled by %s." % username)

          db.session.add(module)

          db.session.commit()
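
With this change, cancelling a module build means PATCHing it into the "failed" state, and the request is rejected unless the build is still in the init, wait, or build state. A hedged client-side sketch using python-requests; the host, build id, and authentication handling are placeholders, not part of this patch:

    import requests

    # Hypothetical MBS instance and module build id, for illustration only.
    url = "https://mbs.example.com/module-build-service/1/module-builds/1234"

    # Cancellation is requested by submitting state "failed" (its numeric
    # string equivalent is accepted as well).
    resp = requests.patch(url, json={"state": "failed"})

    # Any other state, or a build that is no longer in init/wait/build,
    # now results in a 400 validation error.
    print(resp.status_code, resp.json())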

  

@@ -1,4 +1,2 @@ 

  NAME=mbs-prod-integration-test

- KOJI_IMAGE=quay.io/factory2/koji:prod

- UMB_IMAGE=docker-registry.upshift.redhat.com/factory2/umb:prod

  ENVIRONMENT=prod

@@ -1,4 +1,2 @@ 

  NAME=mbs-stage-integration-test

- KOJI_IMAGE=quay.io/factory2/koji:stage

- UMB_IMAGE=docker-registry.upshift.redhat.com/factory2/umb:stage

  ENVIRONMENT=stage

@@ -45,7 +45,7 @@ 

      skipDefaultCheckout()

    }

    environment {

-     PIPELINE_NAMESPACE = readFile("/run/secrets/kubernetes.io/serviceaccount/namespace").trim()

+     TRIGGER_NAMESPACE = readFile("/run/secrets/kubernetes.io/serviceaccount/namespace").trim()

      PAGURE_API = "${params.PAGURE_URL}/api/0"

      PAGURE_REPO_IS_FORK = "${params.PAGURE_REPO_IS_FORK}"

      PAGURE_REPO_HOME = "${env.PAGURE_URL}${env.PAGURE_REPO_IS_FORK == 'true' ? '/fork' : ''}/${params.PAGURE_REPO_NAME}"
@@ -60,6 +60,8 @@ 

            // the return value of checkout() is unreliable.

            // Not working: env.MBS_GIT_COMMIT = scmVars.GIT_COMMIT

            env.MBS_GIT_COMMIT = sh(returnStdout: true, script: 'git rev-parse HEAD').trim()

+           // Set for pagure function from c3i-library

+           env.GIT_COMMIT = env.MBS_GIT_COMMIT

            echo "Build ${params.MBS_GIT_REF}, commit=${env.MBS_GIT_COMMIT}"

  

            // Is the current branch a pull-request? If no, env.PR_NO will be empty.
@@ -122,13 +124,11 @@ 

              // To enable HTML syntax in build description, go to `Jenkins/Global Security/Markup Formatter` and select 'Safe HTML'.

              def pagureLink = """<a href="${env.PR_URL}">${currentBuild.displayName}</a>"""

              try {

-               def prInfo = withPagure {

-                 it.getPR(env.PR_NO)

-               }

+               def prInfo = pagure.getPR(env.PR_NO)

                pagureLink = """<a href="${env.PR_URL}">PR#${env.PR_NO}: ${escapeHtml(prInfo.title)}</a>"""

                // set PR status to Pending

                if (params.PAGURE_API_KEY_SECRET_NAME)

-                 setBuildStatusOnPagurePR(null, "Build #${env.BUILD_NUMBER} in progress (commit: ${env.MBS_GIT_COMMIT.take(8)})")

+                 pagure.setBuildStatusOnPR(null, "Build #${env.BUILD_NUMBER} in progress (commit: ${env.MBS_GIT_COMMIT.take(8)})")

              } catch (Exception e) {

                echo "Error using pagure API: ${e}"

              }
@@ -138,7 +138,7 @@ 

              currentBuild.description = """<a href="${env.PAGURE_REPO_HOME}/c/${env.MBS_GIT_COMMIT}">${currentBuild.displayName}</a>"""

              if (params.PAGURE_API_KEY_SECRET_NAME) {

                try {

-                 flagCommit('pending', null, "Build #${env.BUILD_NUMBER} in progress (commit: ${env.MBS_GIT_COMMIT.take(8)})")

+                 pagure.flagCommit('pending', null, "Build #${env.BUILD_NUMBER} in progress (commit: ${env.MBS_GIT_COMMIT.take(8)})")

                  echo "Updated commit ${env.MBS_GIT_COMMIT} status to PENDING."

                } catch (e) {

  		echo "Error updating commit ${env.MBS_GIT_COMMIT} status to PENDING: ${e}"
@@ -151,24 +151,27 @@ 

      stage('Allocate C3IaaS project') {

        when {

          expression {

-           return params.USE_C3IAAS == 'true' &&

-                  params.C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE &&

-                  params.C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAME

+           return params.USE_C3IAAS == 'true'

          }

        }

        steps {

          script {

+           if (!params.C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAME ||

+               !params.C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE) {

+             error("USE_C3IAAS is set to true but missing C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAME" +

+                   " or C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE")

+           }

            if (env.PR_NO) {

-             env.C3IAAS_NAMESPACE = "c3i-mbs-pr-${env.PR_NO}-git${env.MBS_GIT_COMMIT.take(8)}"

+             env.PIPELINE_ID = "c3i-mbs-pr-${env.PR_NO}-git${env.MBS_GIT_COMMIT.take(8)}"

            } else {

-             env.C3IAAS_NAMESPACE = "c3i-mbs-${params.MBS_GIT_REF}-git${env.MBS_GIT_COMMIT.take(8)}"

+             env.PIPELINE_ID = "c3i-mbs-${params.MBS_GIT_REF}-git${env.MBS_GIT_COMMIT.take(8)}"

            }

-           echo "Requesting new OpenShift project ${env.C3IAAS_NAMESPACE}..."

+           echo "Requesting new OpenShift project ${env.PIPELINE_ID}..."

            openshift.withCluster() {

              openshift.withProject(params.C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE) {

                c3i.buildAndWait(script: this, objs: "bc/${params.C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAME}",

-                 '-e', "PROJECT_NAME=${env.C3IAAS_NAMESPACE}",

-                 '-e', "ADMIN_GROUPS=system:serviceaccounts:${PIPELINE_NAMESPACE}",

+                 '-e', "PROJECT_NAME=${env.PIPELINE_ID}",

+                 '-e', "ADMIN_GROUPS=system:serviceaccounts:${TRIGGER_NAMESPACE}",

                  '-e', "LIFETIME_IN_MINUTES=${params.C3IAAS_LIFETIME}"

                )

              }
@@ -177,10 +180,10 @@ 

        }

        post {

          success {

-           echo "Allocated project ${env.C3IAAS_NAMESPACE}"

+           echo "Allocated project ${env.PIPELINE_ID}"

          }

          failure {

-           echo "Failed to allocate ${env.C3IAAS_NAMESPACE} project"

+           echo "Failed to allocate ${env.PIPELINE_ID} project"

          }

        }

      }
@@ -191,7 +194,7 @@ 

        steps {

          script {

            openshift.withCluster() {

-             openshift.withProject(env.C3IAAS_NAMESPACE ?: env.PIPELINE_NAMESPACE) {

+             openshift.withProject(env.PIPELINE_ID) {

                // OpenShift BuildConfig doesn't support specifying a tag name at build time.

                // We have to create a new BuildConfig for each image build.

                echo 'Creating a BuildConfig for mbs-backend build...'
@@ -204,7 +207,7 @@ 

                  // because refspec cannot be customized in an OpenShift build.

                  '-p', "MBS_GIT_REF=${env.PR_NO ? params.MBS_GIT_REF : env.MBS_GIT_COMMIT}",

                  '-p', "MBS_BACKEND_IMAGESTREAM_NAME=${params.MBS_BACKEND_IMAGESTREAM_NAME}",

-                 '-p', "MBS_BACKEND_IMAGESTREAM_NAMESPACE=${env.C3IAAS_NAMESPACE ?: env.PIPELINE_NAMESPACE}",

+                 '-p', "MBS_BACKEND_IMAGESTREAM_NAMESPACE=${env.PIPELINE_ID}",

                  '-p', "MBS_IMAGE_TAG=${env.TEMP_TAG}",

                  '-p', "EXTRA_RPMS=${params.EXTRA_RPMS}",

                  '-p', "CREATED=${created}"
@@ -228,13 +231,15 @@ 

          }

          cleanup {

            script {

-             if (!env.C3IAAS_NAMESPACE) {

+             if (params.USE_C3IAAS != 'true') {

                openshift.withCluster() {

-                 echo 'Tearing down...'

-                 openshift.selector('bc', [

-                   'app': env.BACKEND_BUILDCONFIG_ID,

-                   'template': 'mbs-backend-build-template',

-                   ]).delete()

+                 openshift.withProject(env.PIPELINE_ID) {

+                   echo 'Tearing down...'

+                   openshift.selector('bc', [

+                     'app': env.BACKEND_BUILDCONFIG_ID,

+                     'template': 'mbs-backend-build-template',

+                     ]).delete()

+                 }

                }

              }

            }
@@ -248,7 +253,7 @@ 

        steps {

          script {

            openshift.withCluster() {

-             openshift.withProject(env.C3IAAS_NAMESPACE ?: env.PIPELINE_NAMESPACE) {

+             openshift.withProject(env.PIPELINE_ID) {

                // OpenShift BuildConfig doesn't support specifying a tag name at build time.

                // We have to create a new BuildConfig for each image build.

                echo 'Creating a BuildConfig for mbs-frontend build...'
@@ -261,10 +266,10 @@ 

                  // because refspec cannot be customized in an OpenShift build.

                  '-p', "MBS_GIT_REF=${env.PR_NO ? params.MBS_GIT_REF : env.MBS_GIT_COMMIT}",

                  '-p', "MBS_FRONTEND_IMAGESTREAM_NAME=${params.MBS_FRONTEND_IMAGESTREAM_NAME}",

-                 '-p', "MBS_FRONTEND_IMAGESTREAM_NAMESPACE=${env.C3IAAS_NAMESPACE ?: env.PIPELINE_NAMESPACE}",

+                 '-p', "MBS_FRONTEND_IMAGESTREAM_NAMESPACE=${env.PIPELINE_ID}",

                  '-p', "MBS_IMAGE_TAG=${env.TEMP_TAG}",

                  '-p', "MBS_BACKEND_IMAGESTREAM_NAME=${params.MBS_BACKEND_IMAGESTREAM_NAME}",

-                 '-p', "MBS_BACKEND_IMAGESTREAM_NAMESPACE=${env.C3IAAS_NAMESPACE ?: env.PIPELINE_NAMESPACE}",

+                 '-p', "MBS_BACKEND_IMAGESTREAM_NAMESPACE=${env.PIPELINE_ID}",

                  '-p', "CREATED=${created}"

                )

                def build = c3i.buildAndWait(script: this, objs: processed, '--from-dir=.')
@@ -311,8 +316,7 @@ 

                    '-e', "MBS_FRONTEND_IMAGE=${env.FRONTEND_IMAGE_REF}",

                    '-e', "TEST_IMAGES=${env.BACKEND_IMAGE_REF},${env.FRONTEND_IMAGE_REF}",

                    '-e', "IMAGE_IS_SCRATCH=${params.MBS_GIT_REF != params.MBS_MAIN_BRANCH}",

-                   // If env.C3IAAS_NAMESPACE has not been defined, tests will be run in the current namespace

-                   '-e', "TEST_NAMESPACE=${env.C3IAAS_NAMESPACE ?: ''}",

+                   '-e', "TEST_NAMESPACE=${env.PIPELINE_ID}",

                    '-e', "TESTCASES='${params.TESTCASES}'",

                    '-e', "CLEANUP=${params.CLEANUP}"

                )
@@ -402,8 +406,7 @@ 

        when {

          expression {

            return "${params.MBS_DEV_IMAGE_TAG}" && params.TAG_INTO_IMAGESTREAM == "true" &&

-             (params.FORCE_PUBLISH_IMAGE == "true" || params.MBS_GIT_REF == params.MBS_MAIN_BRANCH) &&

-             !env.C3IAAS_NAMESPACE

+             (params.FORCE_PUBLISH_IMAGE == "true" || params.MBS_GIT_REF == params.MBS_MAIN_BRANCH)

          }

        }

        steps {
@@ -434,18 +437,20 @@ 

    post {

      cleanup {

        script {

-         if (params.CLEANUP == 'true' && !env.C3IAAS_NAMESPACE) {

+         if (params.CLEANUP == 'true' && params.USE_C3IAAS != 'true') {

            openshift.withCluster() {

-             if (env.BACKEND_IMAGE_TAG) {

-               echo "Removing tag ${env.BACKEND_IMAGE_TAG} from the ${params.MBS_BACKEND_IMAGESTREAM_NAME} ImageStream..."

-               openshift.withProject(params.MBS_BACKEND_IMAGESTREAM_NAMESPACE) {

-                 openshift.tag("${params.MBS_BACKEND_IMAGESTREAM_NAME}:${env.BACKEND_IMAGE_TAG}", "-d")

+             openshift.withProject(env.PIPELINE_ID) {

+               if (env.BACKEND_IMAGE_TAG) {

+                 echo "Removing tag ${env.BACKEND_IMAGE_TAG} from the ${params.MBS_BACKEND_IMAGESTREAM_NAME} ImageStream..."

+                 openshift.withProject(params.MBS_BACKEND_IMAGESTREAM_NAMESPACE) {

+                   openshift.tag("${params.MBS_BACKEND_IMAGESTREAM_NAME}:${env.BACKEND_IMAGE_TAG}", "-d")

+                 }

                }

-             }

-             if (env.FRONTEND_IMAGE_TAG) {

-               echo "Removing tag ${env.FRONTEND_IMAGE_TAG} from the ${params.MBS_FRONTEND_IMAGESTREAM_NAME} ImageStream..."

-               openshift.withProject(params.MBS_FRONTEND_IMAGESTREAM_NAMESPACE) {

-                 openshift.tag("${params.MBS_FRONTEND_IMAGESTREAM_NAME}:${env.FRONTEND_IMAGE_TAG}", "-d")

+               if (env.FRONTEND_IMAGE_TAG) {

+                 echo "Removing tag ${env.FRONTEND_IMAGE_TAG} from the ${params.MBS_FRONTEND_IMAGESTREAM_NAME} ImageStream..."

+                 openshift.withProject(params.MBS_FRONTEND_IMAGESTREAM_NAMESPACE) {

+                   openshift.tag("${params.MBS_FRONTEND_IMAGESTREAM_NAME}:${env.FRONTEND_IMAGE_TAG}", "-d")

+                 }

                }

              }

            }
@@ -457,7 +462,7 @@ 

          // on pre-merge workflow success

          if (params.PAGURE_API_KEY_SECRET_NAME && env.PR_NO) {

            try {

-             setBuildStatusOnPagurePR(100, "Build #${env.BUILD_NUMBER} successful (commit: ${env.MBS_GIT_COMMIT.take(8)})")

+             pagure.setBuildStatusOnPR(100, "Build #${env.BUILD_NUMBER} successful (commit: ${env.MBS_GIT_COMMIT.take(8)})")

              echo "Updated PR #${env.PR_NO} status to PASS."

            } catch (e) {

              echo "Error updating PR #${env.PR_NO} status to PASS: ${e}"
@@ -466,7 +471,7 @@ 

          // on post-merge workflow success

          if (params.PAGURE_API_KEY_SECRET_NAME && !env.PR_NO) {

            try {

-             flagCommit('success', 100, "Build #${env.BUILD_NUMBER} successful (commit: ${env.MBS_GIT_COMMIT.take(8)})")

+             pagure.flagCommit('success', 100, "Build #${env.BUILD_NUMBER} successful (commit: ${env.MBS_GIT_COMMIT.take(8)})")

              echo "Updated commit ${env.MBS_GIT_COMMIT} status to PASS."

            } catch (e) {

              echo "Error updating commit ${env.MBS_GIT_COMMIT} status to PASS: ${e}"
@@ -480,17 +485,17 @@ 

          if (params.PAGURE_API_KEY_SECRET_NAME && env.PR_NO) {

            // updating Pagure PR flag

            try {

-             setBuildStatusOnPagurePR(0, "Build #${env.BUILD_NUMBER} failed (commit: ${env.MBS_GIT_COMMIT.take(8)})")

+             pagure.setBuildStatusOnPR(0, "Build #${env.BUILD_NUMBER} failed (commit: ${env.MBS_GIT_COMMIT.take(8)})")

              echo "Updated PR #${env.PR_NO} status to FAILURE."

            } catch (e) {

              echo "Error updating PR #${env.PR_NO} status to FAILURE: ${e}"

            }

            // making a comment

            try {

-             commentOnPR("""

+             pagure.commentOnPR("""

              Build #${env.BUILD_NUMBER} [failed](${env.BUILD_URL}) (commit: ${env.MBS_GIT_COMMIT}).

              Rebase or make new commits to rebuild.

-             """.stripIndent())

+             """.stripIndent(), env.PR_NO)

              echo "Comment made."

            } catch (e) {

              echo "Error making a comment on PR #${env.PR_NO}: ${e}"
@@ -501,7 +506,7 @@ 

            // updating Pagure commit flag

            if (params.PAGURE_API_KEY_SECRET_NAME) {

              try {

-               flagCommit('failure', 0, "Build #${env.BUILD_NUMBER} failed (commit: ${env.MBS_GIT_COMMIT.take(8)})")

+               pagure.flagCommit('failure', 0, "Build #${env.BUILD_NUMBER} failed (commit: ${env.MBS_GIT_COMMIT.take(8)})")

                echo "Updated commit ${env.MBS_GIT_COMMIT} status to FAILURE."

              } catch (e) {

                echo "Error updating commit ${env.MBS_GIT_COMMIT} status to FAILURE: ${e}"
@@ -524,38 +529,6 @@ 

    def prMatch = branch =~ /^(?:.+\/)?pull\/(\d+)\/head$/

    return prMatch ? prMatch[0][1] : ''

  }

- def withPagure(args=[:], cl) {

-   args.apiUrl = env.PAGURE_API

-   args.repo = env.PAGURE_REPO_NAME

-   args.isFork = env.PAGURE_REPO_IS_FORK == 'true'

-   def pagureClient = pagure.client(args)

-   return cl(pagureClient)

- }

- def withPagureCreds(args=[:], cl) {

-   def pagureClient = null

-   withCredentials([string(credentialsId: "${env.PIPELINE_NAMESPACE}-${env.PAGURE_API_KEY_SECRET_NAME}", variable: 'TOKEN')]) {

-     args.token = env.TOKEN

-     pagureClient = withPagure(args, cl)

-   }

-   return pagureClient

- }

- def setBuildStatusOnPagurePR(percent, String comment) {

-   withPagureCreds {

-     it.updatePRStatus(username: 'c3i-jenkins', uid: "ci-pre-merge-${env.MBS_GIT_COMMIT.take(8)}",

-       url: env.BUILD_URL, percent: percent, comment: comment, pr: env.PR_NO)

-   }

- }

- def flagCommit(status, percent, comment) {

-   withPagureCreds {

-     it.flagCommit(username: 'c3i-jenkins', uid: "ci-post-merge-${env.MBS_GIT_COMMIT.take(8)}", status: status,

-       url: env.BUILD_URL, percent: percent, comment: comment, commit: env.MBS_GIT_COMMIT)

-   }

- }

- def commentOnPR(String comment) {

-   withPagureCreds {

-     it.commentOnPR(comment: comment, pr: env.PR_NO)

-   }

- }

  def sendBuildStatusEmail(String status) {

    def recipient = params.MAIL_ADDRESS

    def subject = "Jenkins job ${env.JOB_NAME} #${env.BUILD_NUMBER} ${status}."

@@ -35,20 +35,6 @@ 

    displayName: The Koji container image to be tested

    description: This field must be in repo:tag or repo@sha256 format

    value: quay.io/factory2/koji:latest

- - name: UMB_IMAGE

-   displayName: The UMB container image to be tested

-   description: This field must be in repo:tag or repo@sha256 format

-   value: docker-registry.upshift.redhat.com/factory2/umb:latest

- - name: USE_KRB5

-   displayName: Deploy a Kerberos KDC and configure the MBS frontend to require Kerberos authentication.

-   description: If not set to "true", no KDC will be deployed and MBS will allow anonymous access.

-   required: true

-   value: "true"

- - name: KRB5_IMAGE

-   displayName: Kerberos 5 KDC image

-   description: The image used to deploy a Kerberos 5 KDC, if configured to do so.

-   required: false

-   value: quay.io/factory2/krb5-fedora:latest

  - name: TEST_IMAGES

    displayName: Images being tested

    description: >-
@@ -67,6 +53,9 @@ 

      Namespace where the Jenkins agent for this test will run, and where

      test resources will be created.

    required: false

+ - name: PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE

+   displayName: Namespace with pipeline-as-a-service build

+   value: c3i

  - name: ENVIRONMENT

    displayName: environment name (dev/stage/prod)

    required: true
@@ -129,18 +118,10 @@ 

            value: "${MBS_FRONTEND_IMAGE}"

          - name: KOJI_IMAGE

            value: "${KOJI_IMAGE}"

-         - name: UMB_IMAGE

-           value: "${UMB_IMAGE}"

-         - name: USE_KRB5

-           value: "${USE_KRB5}"

-         - name: KRB5_IMAGE

-           value: "${KRB5_IMAGE}"

          - name: TEST_IMAGES

            value: "${TEST_IMAGES}"

          - name: IMAGE_IS_SCRATCH

            value: "true"

-         - name: "TEST_ID"

-           value: ""

          - name: JENKINS_AGENT_IMAGE

            value: "${JENKINS_AGENT_IMAGE}"

          - name: JENKINS_AGENT_CLOUD_NAME
@@ -151,6 +132,8 @@ 

            value: "${ENVIRONMENT}"

          - name: MESSAGING_PROVIDER

            value: "${MESSAGING_PROVIDER}"

+         - name: PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE

+           value: "${PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE}"

          - name: JENKINS_AGENT_SERVICE_ACCOUNT

            value: "${NAME}-jenkins-slave"

          - name: TESTCASES

@@ -39,15 +39,12 @@ 

      skipDefaultCheckout()

    }

    environment {

-     // Jenkins BUILD_TAG could be too long (> 63 characters) for OpenShift to consume

-     TEST_ID = "${params.TEST_ID ?: UUID.randomUUID().toString().substring(0,7)}"

+     PIPELINE_ID = "${params.TEST_NAMESPACE}"

    }

    stages {

      stage('Prepare') {

        steps {

          script {

-           // Don't set ENVIRONMENT_LABEL in the environment block! Otherwise you will get 2 different UUIDs.

-           env.ENVIRONMENT_LABEL = "test-${env.TEST_ID}"

            // MBS_GIT_REF can be either a regular branch (in the heads/ namespace), a pull request

            // branch (in the pull/ namespace), or a full 40-character sha1, which is assumed to

            // exist on the master branch.
@@ -87,14 +84,16 @@ 

        }

      }

      stage('Route suffix') {

+       when {

+         expression { !env.PAAS_DOMAIN }

+       }

        steps {

          script {

            openshift.withCluster() {

-             openshift.withProject(params.TEST_NAMESPACE) {

+             openshift.withProject(env.PIPELINE_ID) {

                def testroute = openshift.create('route', 'edge', 'test',  '--service=test', '--port=8080')

                def testhost = testroute.object().spec.host

-               // trim off the test- prefix

-               env.ROUTE_SUFFIX = testhost.drop(5)

+               env.PAAS_DOMAIN = testhost.minus("test-${env.PIPELINE_ID}.")

                testroute.delete()

              }

            }
@@ -102,186 +101,33 @@ 

        }

        post {

          success {

-           echo "Routes end with ${env.ROUTE_SUFFIX}"

-         }

-       }

-     }

-     stage('Generate CA') {

-       steps {

-         script {

-           ca.gen_ca()

-         }

-       }

-     }

-     stage('Deploy KDC') {

-       when {

-         expression {

-           return params.USE_KRB5 == 'true'

-         }

-       }

-       steps {

-         script {

-           env.KRB5_DOMAIN = env.ROUTE_SUFFIX.split('\\.', 2).last()

-           env.KRB5_REALM = env.KRB5_DOMAIN.toUpperCase()

-           env.KRB5_ADMIN_PASSWORD = UUID.randomUUID().toString().take(12)

-           openshift.withCluster() {

-             openshift.withProject(params.TEST_NAMESPACE) {

-               def deployed = krb5.deploy(script: this, test_id: env.TEST_ID,

-                                          realm: env.KRB5_REALM, domain: env.KRB5_DOMAIN,

-                                          admin_password: env.KRB5_ADMIN_PASSWORD)

-               // Wait for the KDC to become available, to allow creation of

-               // principals and keytabs for subsequent deployments.

-               c3i.waitForDeployment(script: this, objs: deployed)

-               def ports = openshift.selector('service', "kerberos-${TEST_ID}").object().spec.ports

-               def kdcPort = ports.find { it.name == 'kdc-udp' }.nodePort

-               def adminPort = ports.find { it.name == 'admin' }.nodePort

-               def kpasswdPort = ports.find { it.name == 'kpasswd-udp' }.nodePort

-               def krb5Host = "krb5-${TEST_ID}-${env.ROUTE_SUFFIX}"

-               env.KRB5_KDC_HOST = "${krb5Host}:${kdcPort}"

-               env.KRB5_ADMIN_HOST = "${krb5Host}:${adminPort}"

-               env.KRB5_KPASSWD_HOST = "${krb5Host}:${kpasswdPort}"

-             }

-           }

-         }

-       }

-       post {

-         success {

-           echo "KDC deployed: REALM: ${env.KRB5_REALM} KDC: ${env.KRB5_KDC_HOST}"

-         }

-         failure {

-           echo "KDC deployment FAILED"

-         }

-       }

-     }

-     stage('Deploy UMB') {

-       steps {

-         script {

-           openshift.withCluster() {

-             openshift.withProject(params.TEST_NAMESPACE) {

-               // The extact hostname doesn't matter, (as long as it resolves to the cluster) because traffic will

-               // be routed to the pod via the NodePort.

-               // However, the hostname we use to access the service must be a subjectAltName of the certificate

-               // being served by the service.

-               env.UMB_HOST = "umb-${TEST_ID}-${env.ROUTE_SUFFIX}"

-               ca.gen_ssl_cert("umb-${TEST_ID}-broker", env.UMB_HOST)

-               def keystore = ca.get_keystore("umb-${TEST_ID}-broker", 'mbskeys')

-               def truststore = ca.get_truststore('mbstrust')

-               deployments = umb.deploy(script: this, test_id: env.TEST_ID,

-                                        keystore_data: keystore, keystore_password: 'mbskeys',

-                                        truststore_data: truststore, truststore_password: 'mbstrust',

-                                        broker_image: params.UMB_IMAGE)

-               def ports = openshift.selector('service', "umb-${TEST_ID}-broker").object().spec.ports

-               env.UMB_AMQPS_PORT = ports.find { it.name == 'amqps' }.nodePort

-               env.UMB_STOMP_SSL_PORT = ports.find { it.name == 'stomp-ssl' }.nodePort

-             }

-           }

-         }

-       }

-       post {

-         success {

-           echo "UMB deployed: amqps: ${env.UMB_HOST}:${env.UMB_AMQPS_PORT} stomp-ssl: ${env.UMB_HOST}:${env.UMB_STOMP_SSL_PORT}"

-         }

-         failure {

-           echo "UMB deployment FAILED"

-         }

-       }

-     }

-     stage('Deploy Koji') {

-       steps {

-         script {

-           openshift.withCluster() {

-             openshift.withProject(params.TEST_NAMESPACE) {

-               env.KOJI_SSL_HOST = "koji-${TEST_ID}-hub-${env.ROUTE_SUFFIX}"

-               def hubcert = ca.get_ssl_cert("koji-${TEST_ID}-hub", env.KOJI_SSL_HOST)

-               env.KOJI_ADMIN = "mbs-${TEST_ID}-koji-admin"

-               env.KOJI_MSG_CERT = "koji-${TEST_ID}-msg"

-               def deployed = koji.deploy(script: this, test_id: env.TEST_ID,

-                                          hubca: ca.get_ca_cert(), hubcert: hubcert,

-                                          brokerurl: "amqps://${env.UMB_HOST}:${env.UMB_AMQPS_PORT}",

-                                          brokercert: ca.get_ssl_cert(env.KOJI_MSG_CERT),

-                                          admin_user: env.KOJI_ADMIN,

-                                          hub_image: params.KOJI_IMAGE)

-               deployments = deployments.union(deployed)

-             }

-           }

-         }

-       }

-       post {

-         success {

-           echo "Koji deployed: hub: https://${env.KOJI_SSL_HOST}/"

-         }

-         failure {

-           echo "Koji deployment FAILED"

-         }

-       }

-     }

-     stage('Deploy MBS') {

-       steps {

-         script {

-           env.MBS_SSL_HOST = "mbs-${TEST_ID}-frontend-${env.ROUTE_SUFFIX}"

-           def frontendcert = ca.get_ssl_cert("mbs-${TEST_ID}-frontend", env.MBS_SSL_HOST)

-           // Required for accessing src.fedoraproject.org

-           def digicertca = readFile file: 'openshift/integration/koji/resources/certs/DigiCertHighAssuranceEVRootCA.pem'

-           def cabundle = ca.get_ca_cert().cert + digicertca

-           def msgcert = ca.get_ssl_cert("mbs-${TEST_ID}-msg")

-           def kojicert = ca.get_ssl_cert(env.KOJI_ADMIN)

-           if (params.USE_KRB5 == 'true') {

-             def krbAdmin = krb5.adminClient()

-             def krbsvc = "HTTP/${env.MBS_SSL_HOST}"

-             krbAdmin.addService(krbsvc)

-             env.MBS_FRONTEND_KEYTAB = krbAdmin.getKeytab(krbsvc)

-             // Usernames between MBS and Koji need to be consistent,

-             // so use the Koji admin as the MBS user.

-             env.KRB5_PRINCIPAL = env.KOJI_ADMIN

-             env.KRB5_PASSWORD = UUID.randomUUID().toString().take(12)

-             krbAdmin.addPrincipal(env.KRB5_PRINCIPAL, env.KRB5_PASSWORD)

-           }

-           openshift.withCluster() {

-             openshift.withProject(params.TEST_NAMESPACE) {

-               def deployed = mbs.deploy(script: this, test_id: env.TEST_ID,

-                                         kojicert: kojicert, kojica: ca.get_ca_cert(),

-                                         brokercert: msgcert,

-                                         frontendcert: frontendcert, frontendca: ca.get_ca_cert(),

-                                         cacerts: cabundle,

-                                         frontend_keytab: params.USE_KRB5 == 'true' ? env.MBS_FRONTEND_KEYTAB : '',

-                                         krb5_conf_configmap: params.USE_KRB5 == 'true' ? "krb5-${TEST_ID}-config" : '',

-                                         krb5_user: params.USE_KRB5 == 'true' ? env.KRB5_PRINCIPAL : '',

-                                         kojiurl: "https://${env.KOJI_SSL_HOST}",

-                                         stompuri: "${env.UMB_HOST}:${env.UMB_STOMP_SSL_PORT}",

-                                         backend_image: params.MBS_BACKEND_IMAGE,

-                                         frontend_image: params.MBS_FRONTEND_IMAGE)

-               deployments = deployments.union(deployed)

-             }

-           }

-         }

-       }

-       post {

-         success {

-           echo "MBS deployed: frontend: https://${env.MBS_SSL_HOST}/"

-         }

-         failure {

-           echo "MBS deployment FAILED"

+           echo "Routes end with ${env.PAAS_DOMAIN}"

          }

        }

      }

-     stage('Wait for deployments') {

+     stage('Deploy test environment') {

        steps {

          script {

            openshift.withCluster() {

-             openshift.withProject(params.TEST_NAMESPACE) {

-               c3i.waitForDeployment(script: this, objs: deployments)

+             openshift.withProject(params.PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE) {

+               c3i.buildAndWait(script: this, objs: "bc/pipeline-as-a-service",

+                 '-e', "DEFAULT_IMAGE_TAG=${env.ENVIRONMENT}",

+                 '-e', "PIPELINE_ID=${env.PIPELINE_ID}",

+                 '-e', "WAIVERDB_IMAGE=",

+                 '-e', "C3IAAS_PROJECT=",

+                 '-e', "RESULTSDB_IMAGE=",

+                 '-e', "RESULTSDB_UPDATER_IMAGE=",

+                 '-e', "GREENWAVE_IMAGE=",

+                 '-e', "DATAGREPPER_IMAGE=",

+                 '-e', "DATANOMMER_IMAGE=",

+                 '-e', "MBS_BACKEND_IMAGE=${env.MBS_BACKEND_IMAGE}",

+                 '-e', "MBS_FRONTEND_IMAGE=${env.MBS_FRONTEND_IMAGE}",

+                 '-e', "PAAS_DOMAIN=${env.PAAS_DOMAIN}"

+               )

              }

            }

          }

        }

-       post {

-         success {

-           echo "Deployments complete"

-         }

-         failure {

-           echo 'Deployments FAILED'

-         }

-       }

      }

      stage('Run tests') {

        steps {
@@ -334,10 +180,8 @@ 

          }

          openshift.withCluster() {

            openshift.withProject(params.TEST_NAMESPACE) {

-             if (deployments) {

-               echo 'Getting logs from all deployments...'

-               deployments.logs('--tail=100')

-             }

+             echo 'Getting logs from all deployments...'

+             openshift.selector('pods', ['c3i.redhat.com/pipeline': env.PIPELINE_ID]).logs('--tail 100')

            }

          }

        }
@@ -349,7 +193,7 @@ 

              /* Tear down everything we just created */

              echo 'Tearing down test resources...'

              openshift.selector('all,pvc,configmap,secret',

-                                ['environment': env.ENVIRONMENT_LABEL]).delete('--ignore-not-found=true')

+                                ['c3i.redhat.com/pipeline': env.PIPELINE_ID]).delete('--ignore-not-found=true')

            }

          } else {

            echo 'Skipping cleanup'
@@ -359,6 +203,10 @@ 

    }

  }

  def sendToResultsDB(imageRef, status) {

+   if (!params.MESSAGING_PROVIDER) {

+     echo "Message bus is not set. Skipping send of:\nimageRef: ${imageRef}\nstatus: ${status}"

+     return

+   }

    def (repourl, digest) = imageRef.tokenize('@')

    def (registry, reponame) = repourl.split('/', 2)

    def image = reponame.split('/').last()

@@ -151,21 +151,19 @@ 

                      // Need to prefix the rev with origin/ for pollSCM to work correctly

                      def rev = "origin/${polled}"

                      def scmVars = c3i.clone(repo: env.GIT_URL, branch: polled, rev: rev)

-                     env.MBS_GIT_COMMIT = scmVars.GIT_COMMIT

+                     env.GIT_COMMIT = scmVars.GIT_COMMIT

                      // setting build display name

                      def prefix = 'origin/'

                      def branch = scmVars.GIT_BRANCH.startsWith(prefix) ? scmVars.GIT_BRANCH.substring(prefix.size())

                        : scmVars.GIT_BRANCH // origin/pull/1234/head -> pull/1234/head, origin/master -> master

                      env.MBS_GIT_BRANCH = branch

-                     echo "Build on branch=${env.MBS_GIT_BRANCH}, commit=${env.MBS_GIT_COMMIT}"

+                     echo "Build on branch=${env.MBS_GIT_BRANCH}, commit=${env.GIT_COMMIT}"

                      if (env.PAGURE_POLLING_FOR_PR == 'false') {

-                       currentBuild.displayName = "${env.MBS_GIT_BRANCH}: ${env.MBS_GIT_COMMIT.substring(0, 7)}"

-                       currentBuild.description = """<a href="${env.PAGURE_REPO_HOME}/c/${env.MBS_GIT_COMMIT}">${currentBuild.displayName}</a>"""

+                       currentBuild.displayName = "${env.MBS_GIT_BRANCH}: ${env.GIT_COMMIT.substring(0, 7)}"

+                       currentBuild.description = """<a href="${env.PAGURE_REPO_HOME}/c/${env.GIT_COMMIT}">${currentBuild.displayName}</a>"""

                      } else if (env.PAGURE_POLLING_FOR_PR == 'true' && branch ==~ /^pull\/[0-9]+\/head$/) {

                        env.PR_NO = branch.split('/')[1]

-                       def prInfo = withPagure {

-                         it.getPR(env.PR_NO)

-                       }

+                       def prInfo = pagure.getPR(env.PR_NO)

                        if (prInfo.status == 'Open') {

                          env.PR_URL = "${env.PAGURE_REPO_HOME}/pull-request/${env.PR_NO}"

                          // To HTML syntax in build description, go to `Jenkins/Global Security/Markup Formatter` and select 'Safe HTML'.
@@ -212,7 +210,8 @@ 

                        echo 'Starting a MBS build run...'

                        def devBuild = c3i.build(script: this,

                          objs: "bc/${env.PAGURE_POLLING_FOR_PR == 'true' ? env.PREMERGE_JOB_NAME : env.POSTMERGE_JOB_NAME}",

-                         '-e', "MBS_GIT_REF=${env.MBS_GIT_BRANCH}"

+                         '-e', "MBS_GIT_REF=${env.MBS_GIT_BRANCH}", '-e', "PAGURE_REPO_IS_FORK=${env.PAGURE_REPO_IS_FORK}",

+                         '-e', "PAGURE_REPO_NAME=${env.PAGURE_REPO_NAME}"

                        )

                        c3i.waitForBuildStart(script: this, build: devBuild)

                        def devBuildInfo = devBuild.object()
@@ -225,10 +224,3 @@ 

                }

              }

            }

-           def withPagure(args=[:], cl) {

-             args.apiUrl = env.PAGURE_API

-             args.repo = env.PAGURE_REPO_NAME

-             args.isFork = env.PAGURE_REPO_IS_FORK == 'true'

-             def pagureClient = pagure.client(args)

-             return cl(pagureClient)

-           }

@@ -2,9 +2,14 @@ 

  import groovy.json.JsonOutput

  

  def runTests() {

-   def clientcert = ca.get_ssl_cert(env.KOJI_ADMIN)

-   koji.setConfig("https://${env.KOJI_SSL_HOST}/kojihub", "https://${env.KOJI_SSL_HOST}/kojifiles",

-                  clientcert.cert, clientcert.key, ca.get_ca_cert().cert)

+   def koji_admin = controller.getVar('KOJI_ADMIN')

+   def clientcert = controller.httpGet("/ca/${koji_admin}", true)

+   def koji_ssl_host = controller.getVar('KOJI_HUB_HOST')

+   def mbs_host = controller.getVar('MBS_FRONTEND_HOST')

+   def ca_cert = controller.httpGet("/ca/cacert")

+   koji.setConfig("https://${koji_ssl_host}/kojihub",

+                  "https://${koji_ssl_host}/kojifiles",

+                  clientcert.cert, clientcert.key, ca_cert)

    def tags = koji.callMethod("listTags")

    if (!tags.any { it.name == "module-f28" }) {

      koji.addTag("module-f28")
@@ -15,9 +20,9 @@ 

    try {

      // There's currently no way to query whether a given user has CG access, so just add it

      // and hope no one else has already done it.

-     koji.runCmd("grant-cg-access", env.KOJI_ADMIN, "module-build-service", "--new")

+     koji.runCmd("grant-cg-access", koji_admin, "module-build-service", "--new")

    } catch (ex) {

-     echo "Granting cg-access to ${env.KOJI_ADMIN} failed, assuming it was already provided in a previous test"

+     echo "Granting cg-access to ${koji_admin} failed, assuming it was already provided in a previous test"

    }

  

    if (!koji.callMethod("listBTypes").any { it.name == "module" }) {
@@ -44,8 +49,8 @@ 

        documentation: https://fedoraproject.org/wiki/Fedora_Packaging_Guidelines_for_Modules

    """

  

-   writeFile file: 'ca-cert.pem', text: ca.get_ca_cert().cert

-   def url = "https://${env.MBS_SSL_HOST}/module-build-service/1/module-builds/"

+   writeFile file: 'ca-cert.pem', text: ca_cert

+   def url = "https://${mbs_host}/module-build-service/1/module-builds/"

    def curlargs = """

      --cacert ca-cert.pem \

      -H 'Content-Type: application/json' \
@@ -55,9 +60,9 @@ 

      -w '%{http_code}'

    """.trim()

    def http_code, response

-   if (env.KRB5_REALM) {

+   if (controller.getVar("KRB5_REALM")) {

      writeFile file: 'buildparams.json', text: JsonOutput.toJson([modulemd: testmodule])

-     krb5.withKrb {

+     krb5.withKrb(controller.getKrb5Vars(koji_admin)) {

        http_code = sh script: "curl --negotiate -u : $curlargs $url", returnStdout: true

        response = readFile file: 'response.json'

      }

@@ -1,9 +1,14 @@ 

  // Build an empty module and verify that the CGImport works correctly

  

  def runTests() {

-   def clientcert = ca.get_ssl_cert(env.KOJI_ADMIN)

-   koji.setConfig("https://${env.KOJI_SSL_HOST}/kojihub", "https://${env.KOJI_SSL_HOST}/kojifiles",

-                  clientcert.cert, clientcert.key, ca.get_ca_cert().cert)

+   def koji_admin = controller.getVar('KOJI_ADMIN')

+   def clientcert = controller.httpGet("/ca/${koji_admin}", true)

+   def koji_ssl_host = controller.getVar('KOJI_HUB_HOST')

+   def mbs_host = controller.getVar('MBS_FRONTEND_HOST')

+   def ca_cert = controller.httpGet("/ca/cacert")

+   koji.setConfig("https://${koji_ssl_host}/kojihub",

+                  "https://${koji_ssl_host}/kojifiles",

+                  clientcert.cert, clientcert.key, ca_cert)

    def tags = koji.callMethod("listTags")

    if (!tags.any { it.name == "module-f28" }) {

      koji.addTag("module-f28")
@@ -14,17 +19,17 @@ 

    try {

      // There's currently no way to query whether a given user has CG access, so just add it

      // and hope no one else has already done it.

-     koji.runCmd("grant-cg-access", env.KOJI_ADMIN, "module-build-service", "--new")

+     koji.runCmd("grant-cg-access", koji_admin, "module-build-service", "--new")

    } catch (ex) {

-     echo "Granting cg-access to ${env.KOJI_ADMIN} failed, assuming it was already provided in a previous test"

+     echo "Granting cg-access to ${koji_admin} failed, assuming it was already provided in a previous test"

    }

  

    if (!koji.callMethod("listBTypes").any { it.name == "module" }) {

      koji.callMethodLogin("addBType", "module")

    }

  

-   writeFile file: 'ca-cert.pem', text: ca.get_ca_cert().cert

-   def url = "https://${env.MBS_SSL_HOST}/module-build-service/1/module-builds/"

+   writeFile file: 'ca-cert.pem', text: ca_cert

+   def url = "https://${mbs_host}/module-build-service/1/module-builds/"

    def curlargs = """

      --cacert ca-cert.pem \

      -H 'Content-Type: application/json' \
@@ -34,12 +39,12 @@ 

      -w '%{http_code}'

    """.trim()

    def http_code, response

-   if (env.KRB5_REALM) {

+   if (controller.getVar("KRB5_REALM")) {

      writeFile file: 'buildparams.json', text: """

        {"scmurl": "https://src.fedoraproject.org/forks/mikeb/modules/testmodule.git?#8b3fb16160f899ce10905faf570f110d52b91154",

         "branch": "empty-f28"}

      """

-     krb5.withKrb {

+     krb5.withKrb(controller.getKrb5Vars(koji_admin)) {

        http_code = sh script: "curl --negotiate -u : $curlargs $url", returnStdout: true

        response = readFile file: 'response.json'

      }
@@ -47,7 +52,7 @@ 

      writeFile file: 'buildparams.json', text: """

        {"scmurl": "https://src.fedoraproject.org/forks/mikeb/modules/testmodule.git?#8b3fb16160f899ce10905faf570f110d52b91154",

         "branch": "empty-f28",

-        "owner":  "${env.KOJI_ADMIN}"}

+        "owner":  "${koji_admin}"}

      """

      http_code = sh script: "curl $curlargs $url", returnStdout: true

      response = readFile file: 'response.json'

@@ -1,9 +1,14 @@ 

  // Submit a build to MBS and verify that it initializes Koji correctly

  

  def runTests() {

-   def clientcert = ca.get_ssl_cert(env.KOJI_ADMIN)

-   koji.setConfig("https://${env.KOJI_SSL_HOST}/kojihub", "https://${env.KOJI_SSL_HOST}/kojifiles",

-                  clientcert.cert, clientcert.key, ca.get_ca_cert().cert)

+   def koji_admin = controller.getVar('KOJI_ADMIN')

+   def clientcert = controller.httpGet("/ca/${koji_admin}", true)

+   def koji_ssl_host = controller.getVar('KOJI_HUB_HOST')

+   def mbs_host = controller.getVar('MBS_FRONTEND_HOST')

+   def ca_cert = controller.httpGet("/ca/cacert")

+   koji.setConfig("https://${koji_ssl_host}/kojihub",

+                  "https://${koji_ssl_host}/kojifiles",

+                  clientcert.cert, clientcert.key, ca_cert)

    def tags = koji.callMethod("listTags")

    if (!tags.any { it.name == "module-f28" }) {

      koji.addTag("module-f28")
@@ -11,8 +16,8 @@ 

    if (!tags.any { it.name == "module-f28-build" }) {

      koji.addTag("module-f28-build", "--parent=module-f28", "--arches=x86_64")

    }

-   writeFile file: 'ca-cert.pem', text: ca.get_ca_cert().cert

-   def url = "https://${env.MBS_SSL_HOST}/module-build-service/1/module-builds/"

+   writeFile file: 'ca-cert.pem', text: ca_cert

+   def url = "https://${mbs_host}/module-build-service/1/module-builds/"

    def curlargs = """

      --cacert ca-cert.pem \

      -H 'Content-Type: application/json' \
@@ -22,12 +27,12 @@ 

      -w '%{http_code}'

    """.trim()

    def http_code, response

-   if (env.KRB5_REALM) {

+   if (controller.getVar("KRB5_REALM")) {

      writeFile file: 'buildparams.json', text: """

        {"scmurl": "https://src.fedoraproject.org/modules/testmodule.git?#9c589780e1dd1698dc64dfa28d30014ad18cad32",

         "branch": "f28"}

      """

-     krb5.withKrb {

+     krb5.withKrb(controller.getKrb5Vars(koji_admin)) {

        http_code = sh script: "curl --negotiate -u : $curlargs $url", returnStdout: true

        response = readFile file: 'response.json'

      }
@@ -35,7 +40,7 @@ 

      writeFile file: 'buildparams.json', text: """

        {"scmurl": "https://src.fedoraproject.org/modules/testmodule.git?#9c589780e1dd1698dc64dfa28d30014ad18cad32",

         "branch": "f28",

-        "owner":  "${env.KOJI_ADMIN}"}

+        "owner":  "${koji_admin}"}

      """

      http_code = sh script: "curl $curlargs $url", returnStdout: true

      response = readFile file: 'response.json'

file modified
+6 -4
@@ -1,10 +1,11 @@ 

+ distro

+ dogpile.cache

  enum34

+ fedmsg

  Flask

  Flask-Migrate

- Flask-SQLAlchemy

  Flask-Script

- dogpile.cache

- fedmsg

+ Flask-SQLAlchemy

  funcsigs # Python2 only

  futures # Python 2 only

  kobo>=0.5.0
@@ -13,8 +14,9 @@ 

  moksha.hub

  munch

  prometheus_client

- pyOpenSSL

  pygobject

+ pyOpenSSL

  requests

  six

  sqlalchemy

+ celery

file modified
+1 -2
@@ -24,7 +24,7 @@ 

  setup(

      name="module-build-service",

      description="The Module Build Service for Modularity",

-     version="2.30.1",

+     version="2.30.4",

      classifiers=["Programming Language :: Python", "Topic :: Software Development :: Build Tools"],

      keywords="module build service fedora modularity koji mock rpm",

      author="The Factory 2.0 Team",
@@ -44,7 +44,6 @@ 

              "mbs-manager = module_build_service.manage:manager_wrapper",

          ],

          "moksha.consumer": "mbsconsumer = module_build_service.scheduler.consumer:MBSConsumer",

-         "moksha.producer": "mbspoller = module_build_service.scheduler.producer:MBSProducer",

          "mbs.messaging_backends": [

              "fedmsg = module_build_service.messaging:_fedmsg_backend",

              "in_memory = module_build_service.messaging:_in_memory_backend",

file modified
+1
@@ -109,6 +109,7 @@ 

      # clean_database is usually called before a test run. So, it makes no sense

      # to keep any changes in the transaction made by previous test.

      db_session.remove()

+     db_session.configure(bind=db.session.get_bind())

  

      db.drop_all()

      db.create_all()
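
The added ``configure(bind=...)`` call re-points the scoped session factory at the engine Flask-SQLAlchemy is using, so the first session created after ``remove()`` sees the schema that ``drop_all()``/``create_all()`` rebuild. A minimal sketch of the same pattern in plain SQLAlchemy (the in-memory engine is a stand-in for the application engine):

    from sqlalchemy import create_engine
    from sqlalchemy.orm import scoped_session, sessionmaker

    engine = create_engine("sqlite://")   # stand-in for the app's engine
    db_session = scoped_session(sessionmaker())

    db_session.remove()                   # discard the previous test's session
    db_session.configure(bind=engine)     # rebind before recreating the schema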

file modified
+12 -2
@@ -11,8 +11,13 @@ 

  top-level directory of this repository. This can be changed to a different

  path by setting ``MBS_TEST_CONFIG``.

  

- See `tests/integration/example.test.env.yaml`_ for the list of configuration

- options and examples.

+ Usually each test will trigger a new module build, and potentially wait until

+ it completes before doing the checks. In order to avoid waiting for this

+ during test development, an existing module build can be reused by specifying

+ a ``build_id`` for the test case.

+ 

+ See `tests/integration/example.test.env.yaml`_ for a complete list of

+ configuration options and examples.

  

  Running the tests

  =================
@@ -30,4 +35,9 @@ 

  

      REQUESTS_CA_BUNDLE=/etc/pki/tls/certs/ca-bundle.crt tox -e integration

  

+ ``MBS_TEST_WORKERS`` can be used to run the tests in parallel. For example, to

+ have 4 tests running in parallel, one could call::

+ 

+     MBS_TEST_WORKERS=4 tox -e integration

+ 

  .. _tests/integration/example.test.env.yaml: example.test.env.yaml
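
For example, a test case entry pinning an existing build in the environment file could look like the following; the id is a placeholder, and the complete set of options lives in the example file referenced above::

    testdata:
      scratch_build:
        build_id: 1234
        module: testmodule
        branch: scratch-build-branch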

file modified
+42 -13
@@ -2,14 +2,21 @@ 

  # SPDX-License-Identifier: MIT

  

  import os

+ import sys

+ import tempfile

  

- import yaml

  import pytest

+ import sh

+ import yaml

+ 

+ import utils

  

- from utils import MBS, Git, Koji

+ our_sh = sh(_out=sys.stdout, _err=sys.stderr, _tee=True)

+ from our_sh import pushd, Command  # noqa

  

  

- def load_test_env():

+ @pytest.fixture(scope="session")

+ def test_env():

      """Load test environment configuration

  

      :return: Test environment configuration.
@@ -21,19 +28,41 @@ 

      return env

  

  

- test_env = load_test_env()

+ @pytest.fixture(scope="function")

+ def repo(request, test_env):

+     """Clone the module repo to be used by the test

  

+     Find out the name of the test (anything that follows "test_"), and get

+     the corresponding module repo from the test environment configuration.

  

- @pytest.fixture(scope="session")

- def mbs():

-     return MBS(test_env["mbs_api"])

+     Clone the repo in a temporary location and switch the current working

+     directory into it.

  

- 

- @pytest.fixture(scope="session")

- def git():

-     return Git(test_env["git_url"])

+     :param pytest.FixtureRequest request: request object giving access

+         to the requesting test context

+     :param pytest.fixture test_env: test environment fixture

+     :return: repository object the tests can work with

+     :rtype: utils.Repo

+     """

+     with tempfile.TemporaryDirectory() as tempdir:

+         testname = request.function.__name__.split("test_", 1)[1]

+         repo_conf = test_env["testdata"][testname]

+         packaging_util = Command(test_env["packaging_utility"]).bake(

+             _out=sys.stdout, _err=sys.stderr, _tee=True

+         )

+         args = [

+             "--branch",

+             repo_conf["branch"],

+             f"modules/{repo_conf['module']}",

+             tempdir,

+         ]

+         packaging_util("clone", *args)

+         with pushd(tempdir):

+             yield utils.Repo(repo_conf["module"])

  

  

  @pytest.fixture(scope="session")

- def koji():

-     return Koji(**test_env["koji"])

+ def koji(test_env):

+     """Koji session for the instance MBS is configured to work with

+     """

+     return utils.Koji(**test_env["koji"])
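
Taken together, the fixtures let a test read as: load the config, clone and bump the module repo, submit a build with the packaging utility, and inspect the result. A hypothetical minimal test in this style (the real tests added below follow the same pattern with stronger checks):

    import utils

    def test_example(test_env, repo, koji):
        # "repo" has already cloned the module and changed into its directory.
        build = utils.Build(test_env["packaging_utility"], test_env["mbs_api"])
        repo.bump()  # commit a trivial change so MBS sees a new ref
        build.run("--optional", "rebuild_strategy=all")
        build.watch()
        assert build.state_name in ("ready", "done")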

@@ -1,9 +1,63 @@ 

  ---

+ # Utility to be used to clone and build the modules.

+ # Its configuration points to the dist-git where

+ # test modules are to be found.

+ packaging_utility: fedpkg

  # API endpoint of the MBS instance under test.

  mbs_api: https://mbs.fedoraproject.org/module-build-service/2/module-builds/

- # Git instance used by the build system.

- git_url: https://src.fedoraproject.org/

  # Koji instance the MBS instance under test communicates with.

  koji:

    server: https://koji.fedoraproject.org/kojihub

    topurl: https://kojipkgs.fedoraproject.org/

+ # Test data to be used by the tests.

+ # Items in here are mapped by their name to the tests that use them.

+ # For example test_scratch_build will use scratch_build.

+ testdata:

+   scratch_build:

+     # MBS build id to be reused for this test.

+     # When specified, no new build is started for this test;

+     # the existing one is reused.

+     build_id: 1234

+     # Name of the module.

+     module: testmodule

+     # Branch which is going to be built for this test.

+     branch: scratch-build-branch

+   failed_build:

+     build_id: 1234

+     module: testmodule

+     branch: failed-build-branch

+     # Batch considered by this test.

+     batch: 2

+     # List of components expected to fail in the batch.

+     failing_components:

+     - comp1

+     # List of components expected to complete or be canceled in the batch.

+     canceled_components:

+     - comp2

+   normal_build:

+     build_id: 1234

+     module: testmodule

+     branch: normal-build-branch

+     # List of components in the order they should be built in. One set represents one batch.

+     buildorder: [{"module-build-macros"}, {"attr"}, {"acl"}]

+     # True if the build buildrequires a Platform stream representing a GA RHEL release

+     platform_is_ga: true

+   resume_cancelled_build:

+     # This scenario doesn't support reusing past builds. "build_id" is not used.

+     module: testmodule

+     branch: cancel-build-branch

+   reuse_all_components:

+     build_id: 1234

+     build_id_reused: 1235

+     module: testmodule

+     branch: reuse-all-components-branch

+   reuse_components:

+     # Build id to be reused for this test; it is used to set a baseline.

+     baseline_build_id: 1234

+     build_id: 1235

+     module: testmodule

+     branch: reuse-components-build-branch

+     # Name of the package that will be changed and rebuilt in the build.

+     package: acl

+     # Name of the branch which is going to be built for the specified package.

+     component_branch: private-test-reuse-components

@@ -0,0 +1,33 @@ 

+ # -*- coding: utf-8 -*-

+ # SPDX-License-Identifier: MIT

+ 

+ import utils

+ 

+ 

+ def test_failed_build(test_env, repo, koji):

+     """

+     Run the build with "rebuild_strategy=all".

+ 

+     Check that:

+       * the module build eventually fails

+       * any other components in the same batch as the failed component are

+         cancelled, if not completed.

+     """

+     build = utils.Build(test_env["packaging_utility"], test_env["mbs_api"])

+     repo.bump()

+     build.run(

+         "--optional",

+         "rebuild_strategy=all",

+         reuse=test_env["testdata"]["failed_build"].get("build_id"),

+     )

+     build.watch()

+ 

+     assert build.state_name == "failed"

+     batch = test_env["testdata"]["failed_build"]["batch"]

+     failing_components = test_env["testdata"]["failed_build"]["failing_components"]

+     canceled_components = test_env["testdata"]["failed_build"]["canceled_components"]

+     assert sorted(failing_components) == sorted(build.component_names(state="FAILED", batch=batch))

+     assert sorted(canceled_components) == sorted(

+         build.component_names(state="COMPLETE", batch=batch)

+         + build.component_names(state="CANCELED", batch=batch)

+     )

@@ -1,8 +1,47 @@ 

  # -*- coding: utf-8 -*-

  # SPDX-License-Identifier: MIT

  

+ import utils

  

- def test_normal_build():

-     """ TODO(csomh): implement this test

+ 

+ def test_normal_build(test_env, repo, koji):

+     """

+     Run a build with `rhpkg-stage module-build --optional rebuild_strategy=all`

+ 

+     Checks:

+     * Check that MBS will submit all the component builds

+     * Check that buildorder of components is respected

+     * Check that MBS will create two content generator builds representing the module:

+         - [module]

+         - [module]-devel

+     * Check that MBS changed the buildrequired platform to have a suffix of "z"

+         if a Platform stream represents a GA RHEL release.

      """

-     assert True

+     build = utils.Build(test_env["packaging_utility"], test_env["mbs_api"])

+     repo.bump()

+     build_id = build.run(

+         "--optional",

+         "rebuild_strategy=all",

+         reuse=test_env["testdata"]["normal_build"].get("build_id"),

+     )

+     build.watch()

+ 

+     assert sorted(build.component_names()) == sorted(repo.components + ["module-build-macros"])

+ 

+     expected_buildorder = test_env["testdata"]["normal_build"]["buildorder"]

+     expected_buildorder = [set(batch) for batch in expected_buildorder]

+     actual_buildorder = build.batches()

+     assert actual_buildorder == expected_buildorder

+ 

+     cg_build = koji.get_build(build.nvr())

+     cg_devel_build = koji.get_build(build.nvr(name_suffix="-devel"))

+     assert cg_build and cg_devel_build

+     assert cg_devel_build['extra']['typeinfo']['module']['module_build_service_id'] == int(build_id)

+ 

+     modulemd = koji.get_modulemd(cg_build)

+     actual_platforms = modulemd["data"]["dependencies"][0]["buildrequires"]["platform"]

+     expected_platforms = repo.platform

+     platform_ga = test_env["testdata"]["normal_build"].get("platform_is_ga")

+     if platform_ga:

+         expected_platforms = [f"{pf}.z" for pf in expected_platforms]

+     assert expected_platforms == actual_platforms

@@ -0,0 +1,33 @@ 

+ # -*- coding: utf-8 -*-

+ # SPDX-License-Identifier: MIT

+ 

+ import utils

+ import time

+ 

+ 

+ def test_resume_cancelled_build(test_env, repo, koji):

+     """

+     Run the build with "rebuild_strategy=all".

+     Wait until the module-build-macros build is submitted to Koji.

+     Cancel the module build.

+     Resume the module with "rhpkg-stage module-build -w".

+ 

+     Check that:

+       * the testmodule had actually been cancelled

+       * the testmodule build succeeded

+ 

+     """

+     build = utils.Build(test_env["packaging_utility"], test_env["mbs_api"])

+     repo.bump()

+     build.run(

+         "--optional",

+         "rebuild_strategy=all",

+     )

+     build.wait_for_koji_task_id(package="module-build-macros", batch=1)

+     build.cancel()

+     # Behave like a human: restarting the build too quickly would lead to an error.

+     time.sleep(10)

+     build.run("--watch")

+ 

+     assert build.state_name == "ready"

+     assert build.was_cancelled()

@@ -0,0 +1,36 @@ 

+ # -*- coding: utf-8 -*-

+ # SPDX-License-Identifier: MIT

+ 

+ import utils

+ 

+ 

+ def test_reuse_all_components(test_env, repo, koji):

+     """Rebuild the test module again, without changing any of the components with:

+ 

+     `fedpkg module-build -w --optional rebuild_strategy=only-changed`

+ 

+     Checks:

+     * Verify that all the components are reused from the first build.

+     * Verify that module-build-macros is not built in the second build.

+     """

+     build = utils.Build(test_env["packaging_utility"], test_env["mbs_api"])

+     repo.bump()

+     build.run(

+         "--watch",

+         "--optional",

+         "rebuild_strategy=all",

+         reuse=test_env["testdata"]["reuse_all_components"].get("build_id"),

+     )

+     task_ids = build.component_task_ids()

+     task_ids.pop("module-build-macros")

+ 

+     repo.bump()

+     build.run(

+         "-w",

+         "--optional",

+         "rebuild_strategy=only-changed",

+         reuse=test_env["testdata"]["reuse_all_components"].get("build_id_reused"))

+     reused_task_ids = build.component_task_ids()

+ 

+     assert not build.components(package="module-build-macros")

+     assert task_ids == reused_task_ids

@@ -0,0 +1,55 @@ 

+ # -*- coding: utf-8 -*-

+ # SPDX-License-Identifier: MIT

+ 

+ import utils

+ 

+ 

+ def test_reuse_components(test_env, repo, koji):

+     """

+     Bump the commit of one of the components that MBS uses.

+     Bump the commit of the same testmodule that was mentioned in the preconditions.

+     Submit a testmodule build again with `fedpkg module-build -w --optional

+         rebuild_strategy=only-changed` (this is the default rebuild strategy).

+ 

+     Checks:

+     * Verify that all the components were reused except for the one whose commit changed.

+     * Verify that the component with the changed commit was rebuilt.

+     """

+     repo.bump()

+     baseline_build = utils.Build(test_env["packaging_utility"], test_env["mbs_api"])

+     baseline_build.run(

+         "--watch",

+         "--optional",

+         "rebuild_strategy=all",

+         reuse=test_env["testdata"]["reuse_components"].get("baseline_build_id"),

+     )

+ 

+     package = test_env["testdata"]["reuse_components"].get("package")

+     component = utils.Component(

+         package,

+         test_env["testdata"]["reuse_components"].get("component_branch")

+     )

+     component.clone(test_env["packaging_utility"])

+     component.bump()

+ 

+     repo.bump()

+     build = utils.Build(test_env["packaging_utility"], test_env["mbs_api"])

+     build.run(

+         "--watch",

+         "--optional",

+         "rebuild_strategy=only-changed",

+         reuse=test_env["testdata"]["reuse_components"].get("build_id"),

+     )

+ 

+     comp_task_ids_base = baseline_build.component_task_ids()

+     comp_task_ids = build.component_task_ids()

+     comp_task_ids_base.pop('module-build-macros')

+     comp_task_ids.pop('module-build-macros')

+     changed_package_base_task_id = comp_task_ids_base.pop(package)

+     changed_package_task_id = comp_task_ids.pop(package)

+     assert changed_package_base_task_id != changed_package_task_id

+     assert comp_task_ids == comp_task_ids_base

+ 

+     assert build.components(package=package)[0]['state_name'] == 'COMPLETE'

+     state_reason = build.components(package=package)[0]['state_reason']

+     assert state_reason != "Reused component from previous module build"

@@ -0,0 +1,33 @@ 

+ # -*- coding: utf-8 -*-

+ # SPDX-License-Identifier: MIT

+ 

+ import utils

+ 

+ 

+ def test_scratch_build(test_env, repo, koji):

+     """

+     Run a scratch build with "rebuild_strategy=all".

+ 

+     Check that:

+     * the module build is done with the correct components

+     * the module build completes in the "done" state

+       (as opposed to the "ready" state)

+     * no content generator builds are created in Koji

+     """

+     build = utils.Build(test_env["packaging_utility"], test_env["mbs_api"])

+     build.run(

+         "--scratch",

+         "--optional",

+         "rebuild_strategy=all",

+         reuse=test_env["testdata"]["scratch_build"].get("build_id"),

+     )

+     build.watch()

+ 

+     assert build.state_name == "done"

+     assert sorted(build.component_names(state="COMPLETE")) == sorted(

+         repo.components + ["module-build-macros"]

+     )

+ 

+     cg_build = koji.get_build(build.nvr())

+     cg_devel_build = koji.get_build(build.nvr(name_suffix="-devel"))

+     assert not (cg_build or cg_devel_build)

file modified
+360 -9
@@ -1,21 +1,372 @@ 

  # -*- coding: utf-8 -*-

  # SPDX-License-Identifier: MIT

  

+ import re

+ import sys

+ import time

  

- class MBS:

+ from kobo import rpmlib

+ import koji

+ import yaml

+ import requests

+ import tempfile

+ import sh

  

-     def __init__(self, api):

-         self._api = api

- 

- 

- class Git:

- 

-     def __init__(self, url):

-         self._url = url
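
+ # sh's module "baking": calling the sh module returns a copy with the given

+ # default arguments applied, registered so names can be imported from it.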

+ our_sh = sh(_out=sys.stdout, _err=sys.stderr, _tee=True)

+ from our_sh import Command, git, pushd  # noqa

  

  

  class Koji:

+     """Wrapper class to work with Koji

+ 

+     :attribute string _server: URL of the Koji hub

+     :attribute string _topurl: URL of the top-level Koji download location

+     :attribute koji.ClientSession _session: Koji session

+     :attribute koji.PathInfo _pathinfo: Koji path

+     """

  

      def __init__(self, server, topurl):

          self._server = server

          self._topurl = topurl

+         self._session = koji.ClientSession(self._server)

+         self._pathinfo = koji.PathInfo(self._topurl)

+ 

+     def get_build(self, nvr_dict):

+         """Koji build data for NVR

+ 

+         :param dict nvr_dict: NVR dictionary as expected by kobo.rpmlib.make_nvr()
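
+             (a dict with "name", "version" and "release" keys)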

+         :return: Dictionary with Koji build data, or None if the build is not found

+         :rtype: dict or None

+         """

+         nvr_string = rpmlib.make_nvr(nvr_dict)

+         return self._session.getBuild(nvr_string)

+ 

+     def get_modulemd(self, cg_build):

+         """Modulemd file of the build from koji archive

+ 

+         :param cg_build: koji build object

+         :return: Modulemd file

+         :rtype: dict

+         """

+         url = self._pathinfo.typedir(cg_build, 'module')

+         r = requests.get(f"{url}/modulemd.txt")

+         r.raise_for_status()

+         return yaml.safe_load(r.content)

+ 

+ 

+ class Repo:

+     """Wrapper class to work with module git repositories

+ 

+     :attribute string module_name: name of the module stored in this repo

+     :attribute dict _modulemd: Modulemd file as read from the repo

+     """

+ 

+     def __init__(self, module_name):

+         self.module_name = module_name

+         self._modulemd = None

+         self._version = None

+ 

+     @property

+     def modulemd(self):

+         """Modulemd file as read from the repo

+ 

+         :return: Modulemd file as read from the repo

+         :rtype: dict

+         """

+         if self._modulemd is None:

+             modulemd_file = self.module_name + ".yaml"

+             with open(modulemd_file, "r") as f:

+                 self._modulemd = yaml.safe_load(f)

+         return self._modulemd

+ 

+     @property

+     def components(self):

+         """List of components as defined in the modulemd file

+ 

+         :return: List of components as defined in the modulemd file

+         :rtype: list of strings

+         """

+         return list(self.modulemd["data"]["components"]["rpms"])

+ 

+     @property

+     def platform(self):

+         """

+         List of platforms in the modulemd file; how the values are obtained differs by version

+ 

+         :return: List of platforms in the modulemd file

+         :rtype: list of strings

+         """

+         if self._version is None:

+             # Use the property so the modulemd file is loaded on first access

+             self._version = self.modulemd["version"]

+         if self._version == 1:

+             return [self.modulemd["data"]["dependencies"]["buildrequires"].get("platform")]

+         elif self._version == 2:

+             return self.modulemd["data"]["dependencies"][0]["buildrequires"].get("platform")

+ 

+     def bump(self):

+         """Create a "bump" commit"""

+         args = [

+             "--allow-empty",

+             "-m",

+             "Bump"

+         ]

+         git("commit", *args)

+         git("push")

+ 

+ 

+ class Build:

+     """Wrapper class to work with module builds

+ 

+     :attribute sh.Command _packaging_utility: packaging utility command used to

+         kick off this build

+     :attribute string _mbs_api: URL of the MBS API (including trailing '/')

+     :attribute string _url: URL of this MBS module build

+     :attribute dict _data: Module build data cache for this build fetched from MBS

+     :attribute dict _module_build_data: Verbose module build data cache for this build

+     """

+ 

+     def __init__(self, packaging_utility, mbs_api):

+         self._packaging_utility = Command(packaging_utility).bake(

+             _out=sys.stdout, _err=sys.stderr, _tee=True

+         )

+         self._mbs_api = mbs_api

+         self._data = None

+         self._component_data = None

+         self._build_id = None

+         self._module_build_data = None

+ 

+     def run(self, *args, reuse=None):

+         """Run a module build

+ 

+         :param args: Options and arguments for the build command

+         :param int reuse: Optional MBS build id to be reused for this run.

+             When specified, the corresponding module build will be used,

+             instead of triggering and waiting for a new one to finish.

+             Intended to be used while developing the tests.
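
+             Example (hypothetical id): build.run("--watch", reuse=1234)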

+         :return: MBS build id of the build created

+         :rtype: int

+         """

+         current_build_id = self._build_id

+         if reuse is not None:

+             self._build_id = int(reuse)

+         else:

+             stdout = self._packaging_utility("module-build", *args).stdout.decode("utf-8")
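
+             # The utility prints the URL of the new MBS module build; parse the id from it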

+             self._build_id = int(re.search(self._mbs_api + r"module-builds/(\d+)", stdout).group(1))

+         # Clear cached data

+         if current_build_id != self._build_id:

+             self._component_data = None

+         return self._build_id

+ 

+     def watch(self):

+         """Watch the build till the finish"""

+         if self._build_id is None:

+             raise RuntimeError("Build was not started. Cannot watch.")

+ 

+         stdout = self._packaging_utility(

+             "module-build-watch", str(self._build_id)

+         ).stdout.decode("utf-8")

+ 

+         return stdout

+ 

+     def cancel(self):

+         """Cancel the module build

+ 

+         :return: Standard output of the "module-build-cancel <build id=""> command

+         :rtype: str

+         """

+         stdout = self._packaging_utility("module-build-cancel", self._build_id).stdout.decode(

+             "utf-8")

+         return stdout

+ 

+     @property

+     def data(self):

+         """Module build data cache for this build fetched from MBS"""

+         if self._data is None and self._build_id:

+             r = requests.get(f"{self._mbs_api}module-builds/{self._build_id}")

+             r.raise_for_status()

+             self._data = r.json()

+         return self._data

+ 

+     @property

+     def component_data(self):

+         """Component data for the module build"""

+         if self._component_data is None and self._build_id:

+             params = {

+                 "module_build": self._build_id,

+                 "verbose": True,

+             }

+             r = requests.get(f"{self._mbs_api}component-builds/", params=params)

+             r.raise_for_status()

+             self._component_data = r.json()

+         return self._component_data

+ 

+     @property

+     def module_build_data(self):

+         """Verbose module build

+ 

+         :return: Dictionary of the verbose module build parameters

+         :rtype: dict

+         """

+         if self._build_id:

+             params = {

+                 "verbose": True,

+             }

+             r = requests.get(f"{self._mbs_api}module-builds/{self._build_id}", params=params)

+             r.raise_for_status()

+             self._module_build_data = r.json()

+         return self._module_build_data

+ 

+     @property

+     def state_name(self):

+         """Name of the state of this module build"""

+         return self.data["state_name"]

+ 

+     def components(self, state=None, batch=None, package=None):

+         """Components of this module build, optionally filtered based on properties

+ 

+         :param string state: Koji build state the components should be in

+         :param int batch: the number of the batch the components should be in

+         :param string package: name of the component (package)

+         :return: List of filtered components

+         :rtype: list of dict

+         """

+         filtered = self.component_data["items"]

+         if batch is not None:

+             filtered = filter(lambda x: x["batch"] == batch, filtered)

+         if state is not None:

+             filtered = filter(lambda x: x["state_name"] == state, filtered)

+         if package is not None:

+             filtered = filter(lambda x: x["package"] == package, filtered)

+ 

+         return list(filtered)

+ 

+     def component_names(self, state=None, batch=None, package=None):

+         """Component names of this module build, optionally filtered based on properties

+ 

+         :param string state: Koji build state the components should be in

+         :param int batch: the number of the batch the components should be in

+         :param string package: name of the component (package)

+         :return: List of component package names

+         :rtype: list of strings

+         """

+         components = self.components(state, batch, package)

+         return [item["package"] for item in components]

+ 

+     def component_task_ids(self):

+         """Dictionary containing all names of packages from build and appropriate task ids

+ 

+         :return: Dictionary containing name of packages and their task id

+         :rtype: dict

+         """

+         return {comp["package"]: comp["task_id"] for comp in self.components()}

+ 

+     def batches(self):

+         """

+         Components of the module build grouped into sets by batch

+ 

+         :return: list of sets of component names, ordered by batch

+         :rtype: list of sets
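
+             e.g. [{"module-build-macros"}, {"acl", "sed"}] (hypothetical component names)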

+         """

+         comps_data = sorted(self.component_data["items"], key=lambda x: x["batch"])

+         batch_count = comps_data[-1]["batch"]

+         batches = batch_count * [set()]

+         for data in comps_data:

+             batch = data["batch"]

+             package = data["package"]

+             batches[batch - 1] = batches[batch - 1].union({package})

+ 

+         return batches

+ 

+     def wait_for_koji_task_id(self, package, batch, timeout=300, sleep=10):

+         """Wait until the component is submitted to Koji (has a task_id)

+ 

+         :param string package: name of component (package)

+         :param int batch: the number of the batch the components should be in

+         :param int timeout: total time in seconds to wait before giving up

+         :param int sleep: time in seconds to sleep between polls

+ 

+         :return: Koji task id of the component

+         :rtype: int

+         """

+         start = time.time()

+         while time.time() - start <= timeout:

+             # Clear cached data

+             self._component_data = None

+             components = self.components(package=package, batch=batch)

+             # Wait until the right component appears and has a task_id

+             if components and components[0]["task_id"]:

+                 return components[0]["task_id"]

+             time.sleep(sleep)

+ 

+         raise RuntimeError(

+             f'Koji task for "{package}" did not start in {timeout} seconds'

+         )

+ 

+     def nvr(self, name_suffix=""):

+         """NVR dictionary of this module build

+ 

+         :param string name_suffix: an optional suffix for the name component of the NVR

+         :return: dictionary with NVR components

+         :rtype: dict
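
+             e.g. {"name": "testmodule", "version": "master", "release": "20180101.abcd1234"} (hypothetical)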

+         """

+         return {

+             "name": f'{self.data["name"]}{name_suffix}',

+             "version": self.data["stream"].replace("-", "_"),

+             "release": f'{self.data["version"]}.{self.data["context"]}',

+         }

+ 

+     def was_cancelled(self):

+         """Checking in the status trace if module was canceled

+ 

+         :return: Whether exists required status

+         :rtype: bool

+         """

+         for item in self.module_build_data["state_trace"]:

+             if (

+                     item["reason"] is not None

+                     and "Canceled" in item["reason"]

+                     and item["state_name"] == "failed"

+             ):

+                 return True

+         return False

+ 

+ 

+ class Component:

+     """Wrapper class to work with git repositories of components

+ 

+     :attribute string _module_name: name of the component (package) stored in this repo

+     :attribute string _branch: branch of the git repo that will be used

+     :attribute TemporaryDirectory _clone_dir: directory holding the clone of the repo

+     """

+     def __init__(self, module_name, branch):

+         self._module_name = module_name

+         self._branch = branch

+         self._clone_dir = None

+ 

+     def __del__(self):

+         # clone() may never have been called; then there is nothing to clean up

+         if self._clone_dir is not None:

+             self._clone_dir.cleanup()

+ 

+     def clone(self, packaging_utility):

+         """Clone the git repo of the component to be used by the test in a temporary location

+ 

+         Directory of the clone is stored in self._clone_dir.

+         :param string packaging_utility: packaging utility as defined in test.env.yaml

+         """

+         tempdir = tempfile.TemporaryDirectory()

+         args = [

+             "--branch",

+             self._branch,

+             f'rpms/{self._module_name}',

+             tempdir.name

+         ]

+         packaging_util = Command(packaging_utility)

+         packaging_util("clone", *args)

+         self._clone_dir = tempdir

+ 

+     def bump(self):

+         """Create a "bump" commit and push it in git"""

+         args = [

+             "--allow-empty",

+             "-m",

+             "Bump"

+         ]

+         with pushd(self._clone_dir.name):

+             git("commit", *args)

+             git("push")

file modified
+141 -60
@@ -1,5 +1,6 @@ 

  # -*- coding: utf-8 -*-

  # SPDX-License-Identifier: MIT

+ import sched

  import koji

  import os

  import re
@@ -9,15 +10,21 @@ 

  from datetime import datetime, timedelta

  from random import randint

  import hashlib

+ import moksha.hub

+ import fedmsg

  

  import module_build_service.messaging

  import module_build_service.scheduler.consumer

  import module_build_service.scheduler.handlers.repos

  import module_build_service.utils

  from module_build_service.errors import Forbidden

- from module_build_service import models, conf, build_logs

+ from module_build_service import models, conf, build_logs, log

  from module_build_service.db_session import db_session

- from module_build_service.scheduler import make_simple_stop_condition

+ from module_build_service.scheduler import events

+ from module_build_service.scheduler.handlers.tags import tagged as tagged_handler

+ from module_build_service.scheduler.handlers.components import (

+     build_task_finalize as build_task_finalize_handler)

+ from module_build_service.scheduler.handlers.repos import done as repos_done_handler

  

  from mock import patch, PropertyMock, Mock, MagicMock

  from werkzeug.datastructures import FileStorage
@@ -27,9 +34,8 @@ 

  import json

  import itertools

  

- from module_build_service.builder.base import GenericBuilder

+ from module_build_service.builder import GenericBuilder

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

- from module_build_service.messaging import MBSModule

  from tests import (

      app, clean_database, read_staged_data, staged_data_filename

  )
@@ -39,6 +45,83 @@ 

  user = ("Homer J. Simpson", {"packager"})

  

  

+ def make_simple_stop_condition():

+     """ Return a simple stop_condition callable.

+ 

+     Intended to be used with the main() function defined below in these tests.

+ 

+     The stop_condition returns True when the latest module build enters any

+     of the finished states.

+     """

+     def stop_condition(message):

+         # XXX - We ignore the message here and instead just query the DB.

+ 

+         # Grab the latest module build.

+         module = (

+             db_session.query(models.ModuleBuild)

+             .order_by(models.ModuleBuild.id.desc())

+             .first()

+         )

+         done = (

+             models.BUILD_STATES["failed"],

+             models.BUILD_STATES["ready"],

+             models.BUILD_STATES["done"],

+         )

+         result = module.state in done

+         log.debug("stop_condition checking %r, got %r" % (module, result))

+ 

+         # moksha.hub.main starts the hub and runs it in a separate thread. When

+         # the result is True, remove the db_session from that thread local so

+         # that any pending queries in the transaction will not block other

+         # queries made from other threads.

+         # This is useful for testing particularly.

+         if result:

+             db_session.remove()

+ 

+         return result

+ 

+     return stop_condition

+ 

+ 

+ def main(initial_messages, stop_condition):

+     """ Run the consumer until some condition is met.

+ 

+     Setting stop_condition to None will run the consumer forever.

+     """

+ 

+     config = fedmsg.config.load_config()

+     config["mbsconsumer"] = True

+     config["mbsconsumer.stop_condition"] = stop_condition

+     config["mbsconsumer.initial_messages"] = initial_messages

+ 

+     # Moksha requires that we subscribe to *something*, so tell it /dev/null

+     # since we'll just be doing in-memory queue-based messaging for this single

+     # build.

+     config["zmq_enabled"] = True

+     config["zmq_subscribe_endpoints"] = "ipc:///dev/null"

+ 

+     # Lazy import consumer to avoid potential import cycle.

+     # For example, in some cases, importing event message from events.py would

+     # cause importing the consumer module, which then starts to import relative

+     # code inside handlers module, and the original code is imported eventually.

+     import module_build_service.scheduler.consumer

+ 

+     consumers = [module_build_service.scheduler.consumer.MBSConsumer]

+ 

+     # Note that the hub we kick off here cannot send any message.  You

+     # should use fedmsg.publish(...) still for that.

+     moksha.hub.main(

+         # Pass in our config dict

+         options=config,

+         # Only run the specified consumers if any are so specified.

+         consumers=consumers,

+         # Do not run default producers.

+         producers=[],

+         # Tell moksha to quiet its logging.

+         framework=False,

+     )

+ 

+ 

  class FakeSCM(object):

      def __init__(self, mocked_scm, name, mmd_filename, commit=None, version=20180205135154):

          self.mocked_scm = mocked_scm
@@ -232,32 +315,20 @@ 

          return {"name": self.tag_name + "-build"}

  

      def _send_repo_done(self):

-         msg = module_build_service.messaging.KojiRepoChange(

-             msg_id="a faked internal message", repo_tag=self.tag_name + "-build")

-         module_build_service.scheduler.consumer.work_queue_put(msg)

+         events.scheduler.add(repos_done_handler, ("fake_msg", self.tag_name + "-build"))

  

      def _send_tag(self, artifact, nvr, dest_tag=True):

          if dest_tag:

              tag = self.tag_name

          else:

              tag = self.tag_name + "-build"

-         msg = module_build_service.messaging.KojiTagChange(

-             msg_id="a faked internal message", tag=tag, artifact=artifact, nvr=nvr)

-         module_build_service.scheduler.consumer.work_queue_put(msg)

+         events.scheduler.add(tagged_handler, ("a faked internal message", tag, artifact, nvr))

  

      def _send_build_change(self, state, name, build_id):

          # build_id=1 and task_id=1 are OK here, because we are building just

          # one RPM at the time.

-         msg = module_build_service.messaging.KojiBuildChange(

-             msg_id="a faked internal message",

-             build_id=build_id,

-             task_id=build_id,

-             build_name=name,

-             build_new_state=state,

-             build_release="1",

-             build_version="1",

-         )

-         module_build_service.scheduler.consumer.work_queue_put(msg)

+         args = ("a faked internal message", build_id, build_id, state, name, "1", "1", None, None)

+         events.scheduler.add(build_task_finalize_handler, args)

  

      def build(self, artifact_name, source):

          print("Starting building artifact %s: %s" % (artifact_name, source))
@@ -286,7 +357,6 @@ 

          pass

  

      def recover_orphaned_artifact(self, component_build):

-         msgs = []

          if self.INSTANT_COMPLETE:

              disttag = module_build_service.utils.get_rpm_release(

                  self.db_session, component_build.module_build)
@@ -298,28 +368,17 @@ 

              component_build.state_reason = "Found existing build"

              nvr_dict = kobo.rpmlib.parse_nvr(component_build.nvr)

              # Send a message stating the build is complete

-             msgs.append(

-                 module_build_service.messaging.KojiBuildChange(

-                     "recover_orphaned_artifact: fake message",

-                     randint(1, 9999999),

-                     component_build.task_id,

-                     koji.BUILD_STATES["COMPLETE"],

-                     component_build.package,

-                     nvr_dict["version"],

-                     nvr_dict["release"],

-                     component_build.module_build.id,

-                 )

-             )

+             args = ("recover_orphaned_artifact: fake message", randint(1, 9999999),

+                     component_build.task_id, koji.BUILD_STATES["COMPLETE"],

+                     component_build.package, nvr_dict["version"], nvr_dict["release"],

+                     component_build.module_build.id, None)

+             events.scheduler.add(build_task_finalize_handler, args)

              # Send a message stating that the build was tagged in the build tag

-             msgs.append(

-                 module_build_service.messaging.KojiTagChange(

-                     "recover_orphaned_artifact: fake message",

+             args = ("recover_orphaned_artifact: fake message",

                      component_build.module_build.koji_tag + "-build",

-                     component_build.package,

-                     component_build.nvr,

-                 )

-             )

-         return msgs

+                     component_build.package, component_build.nvr)

+             events.scheduler.add(tagged_handler, args)

+             return True

  

      def finalize(self, succeeded=None):

          if FakeModuleBuilder.on_finalize_cb:
@@ -339,7 +398,7 @@ 

  class BaseTestBuild:

  

      def run_scheduler(self, msgs=None, stop_condition=None):

-         module_build_service.scheduler.main(

+         main(

              msgs or [],

              stop_condition or make_simple_stop_condition()

          )
@@ -408,8 +467,18 @@ 

              return_value=True)

          self.mock_check_gating = self.p_check_gating.start()

  

+         self.patch_config_broker = patch.object(

+             module_build_service.config.Config,

+             "celery_broker_url",

+             create=True,

+             new_callable=PropertyMock,

+             return_value=False,

+         )

+         self.patch_config_broker.start()

+ 

      def teardown_method(self, test_method):

          self.p_check_gating.stop()

+         self.patch_config_broker.stop()

          FakeModuleBuilder.reset()

          cleanup_moksha()

          for i in range(20):
@@ -858,8 +927,9 @@ 

          new_callable=PropertyMock,

          return_value=2,

      )

+     @patch('module_build_service.scheduler.events.Scheduler.run', autospec=True)

      def test_try_to_reach_concurrent_threshold(

-         self, conf_num_concurrent_builds, mocked_scm, mocked_get_user,

+         self, scheduler_run, conf_num_concurrent_builds, mocked_scm, mocked_get_user,

          conf_system, dbg, hmsc

      ):

          """
@@ -886,24 +956,21 @@ 

          # the module build.

          TestBuild._global_var = []

  

-         def stop(message):

+         def mocked_scheduler_run(self):

              """

-             Stop the scheduler when the module is built or when we try to build

-             more components than the num_concurrent_builds.

+             Store the number of concurrent builds between each handler call in a

+             global list so we can examine it later.

              """

-             main_stop = module_build_service.scheduler.make_simple_stop_condition()

              num_building = (

                  db_session.query(models.ComponentBuild)

                  .filter_by(state=koji.BUILD_STATES["BUILDING"])

                  .count()

              )

-             over_threshold = conf.num_concurrent_builds < num_building

              TestBuild._global_var.append(num_building)

-             result = main_stop(message) or over_threshold

-             db_session.remove()

-             return result

+             sched.scheduler.run(self)

  

-         self.run_scheduler(stop_condition=stop)

+         scheduler_run.side_effect = mocked_scheduler_run

+         self.run_scheduler()

  

          # _global_var looks similar to this: [0, 1, 0, 0, 2, 2, 1, 0, 0, 0]

          # It shows the number of concurrent builds in the time. At first we
@@ -1092,7 +1159,14 @@ 

          from module_build_service.db_session import db_session

  

          # Create a dedicated database session for scheduler to avoid hang

-         self.run_scheduler(msgs=[MBSModule("local module build", 3, 1)])

+         self.run_scheduler(

+             msgs=[{

+                 "msg_id": "local module build",

+                 "event": events.MBS_MODULE_STATE_CHANGE,

+                 "module_build_id": 3,

+                 "module_build_state": 1

+             }]

+         )

  

          reused_component_ids = {

              "module-build-macros": None,
@@ -1171,7 +1245,14 @@ 

  

          FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

  

-         self.run_scheduler(msgs=[MBSModule("local module build", 3, 1)])

+         self.run_scheduler(

+             msgs=[{

+                 "msg_id": "local module build",

+                 "event": events.MBS_MODULE_STATE_CHANGE,

+                 "module_build_id": 3,

+                 "module_build_state": 1

+             }]

+         )

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -1741,11 +1822,11 @@ 

          # Simulate a random repo regen message that MBS didn't expect

          cleanup_moksha()

          module = models.ModuleBuild.get_by_id(db_session, module_build_id)

-         msgs = [

-             module_build_service.messaging.KojiRepoChange(

-                 msg_id="a faked internal message", repo_tag=module.koji_tag + "-build"

-             )

-         ]

+         events_info = [{

+             "msg_id": "a faked internal message",

+             "event": events.KOJI_REPO_CHANGE,

+             "repo_tag": module.koji_tag + "-build"

+         }]

          db_session.expire_all()

          # Stop after processing the seeded message

  
@@ -1753,7 +1834,7 @@ 

              db_session.remove()

              return True

  

-         self.run_scheduler(msgs, stop_condition=stop)

+         self.run_scheduler(events_info, stop_condition=stop)

  

          # Make sure the module build didn't fail so that the poller can resume it later

          module = models.ModuleBuild.get_by_id(db_session, module_build_id)

file modified
+74 -51
@@ -13,9 +13,10 @@ 

  import module_build_service.messaging

  import module_build_service.scheduler.handlers.repos

  import module_build_service.models

- import module_build_service.builder

  from module_build_service import Modulemd

  from module_build_service.db_session import db_session

+ from module_build_service.builder import GenericBuilder

+ from module_build_service.scheduler import events

  from module_build_service.utils.general import mmd_to_str

  

  import pytest
@@ -88,6 +89,7 @@ 

  class TestKojiBuilder:

      def setup_method(self, test_method):

          init_data(1)

+         events.scheduler.reset()

          self.config = mock.Mock()

          self.config.koji_profile = conf.koji_profile

          self.config.koji_repository_url = conf.koji_repository_url
@@ -104,12 +106,13 @@ 

  

      def teardown_method(self, test_method):

          self.p_read_config.stop()

+         events.scheduler.reset()

  

      def test_tag_to_repo(self):

          """ Test that when a repo msg hits us and we have no match,

          that we do nothing gracefully.

          """

-         repo = module_build_service.builder.GenericBuilder.tag_to_repo(

+         repo = GenericBuilder.tag_to_repo(

              "koji", self.config, "module-base-runtime-0.25-9", "x86_64"

          )

          assert repo == (
@@ -144,26 +147,25 @@ 

          component_build.state = None

          component_build.nvr = None

  

-         actual = builder.recover_orphaned_artifact(component_build)

+         recovered = builder.recover_orphaned_artifact(component_build)

          # recover_orphaned_artifact modifies a component build, but doesn't

          # commit the changes.

          db_session.commit()

  

-         assert len(actual) == 3

-         assert type(actual[0]) == module_build_service.messaging.KojiBuildChange

-         assert actual[0].build_id == 91

-         assert actual[0].task_id == 12345

-         assert actual[0].build_new_state == koji.BUILD_STATES["COMPLETE"]

-         assert actual[0].build_name == "rubygem-rails"

-         assert actual[0].build_version == "1.0"

-         assert actual[0].build_release == "1.module+e0095747"

-         assert actual[0].module_build_id == 4

-         assert type(actual[1]) == module_build_service.messaging.KojiTagChange

-         assert actual[1].tag == "module-foo-build"

-         assert actual[1].artifact == "rubygem-rails"

-         assert type(actual[2]) == module_build_service.messaging.KojiTagChange

-         assert actual[2].tag == "module-foo"

-         assert actual[2].artifact == "rubygem-rails"

+         assert recovered

+ 
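
+         # events.scheduler.queue holds sched.Event-style entries; index 3 is assumed

+         # to be the argument tuple the handler will be called with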

+         event_info = events.scheduler.queue[0][3]

+         assert event_info == ('recover_orphaned_artifact: fake message', 91, 12345, 1,

+                               'rubygem-rails', '1.0', '1.module+e0095747', 4, None)

+ 

+         event_info = events.scheduler.queue[1][3]

+         assert event_info == ('recover_orphaned_artifact: fake message', 'module-foo-build',

+                               'rubygem-rails', 'foo-1.0-1.module+e0095747')

+ 

+         event_info = events.scheduler.queue[2][3]

+         assert event_info == ('recover_orphaned_artifact: fake message', 'module-foo',

+                               'rubygem-rails', 'foo-1.0-1.module+e0095747')

+ 

          assert component_build.state == koji.BUILD_STATES["COMPLETE"]

          assert component_build.task_id == 12345

          assert component_build.state_reason == "Found existing build"
@@ -201,18 +203,14 @@ 

          component_build.state = None

          db_session.commit()

  

-         actual = builder.recover_orphaned_artifact(component_build)

+         recovered = builder.recover_orphaned_artifact(component_build)

          db_session.commit()

  

-         assert len(actual) == 1

-         assert type(actual[0]) == module_build_service.messaging.KojiBuildChange

-         assert actual[0].build_id == 91

-         assert actual[0].task_id == 12345

-         assert actual[0].build_new_state == koji.BUILD_STATES["COMPLETE"]

-         assert actual[0].build_name == "rubygem-rails"

-         assert actual[0].build_version == "1.0"

-         assert actual[0].build_release == "1.{0}".format(dist_tag)

-         assert actual[0].module_build_id == 4

+         assert recovered

+         event_info = events.scheduler.queue[0][3]

+         assert event_info == ('recover_orphaned_artifact: fake message', 91, 12345, 1,

+                               'rubygem-rails', '1.0', '1.module+2+b8661ee4', 4, None)

+ 

          assert component_build.state == koji.BUILD_STATES["COMPLETE"]

          assert component_build.task_id == 12345

          assert component_build.state_reason == "Found existing build"
@@ -255,18 +253,14 @@ 

          component_build.state = None

          db_session.commit()

  

-         actual = builder.recover_orphaned_artifact(component_build)

+         recovered = builder.recover_orphaned_artifact(component_build)

          db_session.commit()

  

-         assert len(actual) == 1

-         assert type(actual[0]) == module_build_service.messaging.KojiBuildChange

-         assert actual[0].build_id == 91

-         assert actual[0].task_id == 12345

-         assert actual[0].build_new_state == koji.BUILD_STATES["COMPLETE"]

-         assert actual[0].build_name == "module-build-macros"

-         assert actual[0].build_version == "1.0"

-         assert actual[0].build_release == "1.{0}".format(dist_tag)

-         assert actual[0].module_build_id == 4

+         assert recovered

+         event_info = events.scheduler.queue[0][3]

+         assert event_info == ('recover_orphaned_artifact: fake message', 91, 12345, 1,

+                               'module-build-macros', '1.0', "1.{0}".format(dist_tag), 4, None)

+ 

          assert component_build.state == koji.BUILD_STATES["COMPLETE"]

          assert component_build.task_id == 12345

          assert component_build.state_reason == "Found existing build"
@@ -307,10 +301,10 @@ 

          component_build.state = None

          db_session.commit()

  

-         actual = builder.recover_orphaned_artifact(component_build)

+         recovered = builder.recover_orphaned_artifact(component_build)

          db_session.commit()

  

-         assert actual == []

+         assert not recovered

          # Make sure nothing erroneous gets tag

          assert builder.koji_session.tagBuild.call_count == 0

  
@@ -422,7 +416,7 @@ 

          assert mock_session.untagBuild.mock_calls == expected_calls

  

      @patch.dict("sys.modules", krbV=MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_build_weights(self, ClientSession):

          session = ClientSession.return_value

          session.getLoggedInUser.return_value = {"id": 123}
@@ -448,7 +442,7 @@ 

          session.krb_login.assert_called_once()

  

      @patch.dict("sys.modules", krbV=MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_build_weights_no_task_id(self, ClientSession):

          session = ClientSession.return_value

          session.getLoggedInUser.return_value = {"id": 123}
@@ -470,7 +464,7 @@ 

          session.krb_login.assert_called_once()

  

      @patch.dict("sys.modules", krbV=MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_build_weights_no_build(self, ClientSession):

          session = ClientSession.return_value

          session.getLoggedInUser.return_value = {"id": 123}
@@ -492,7 +486,7 @@ 

          session.krb_login.assert_called_once()

  

      @patch.dict("sys.modules", krbV=MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_build_weights_listBuilds_failed(self, ClientSession):

          session = ClientSession.return_value

          session.getLoggedInUser.return_value = {"id": 123}
@@ -512,7 +506,7 @@ 

          session.krb_login.assert_called_once()

  

      @patch.dict("sys.modules", krbV=MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_build_weights_getPackageID_failed(self, ClientSession):

          session = ClientSession.return_value

          session.getLoggedInUser.return_value = {"id": 123}
@@ -528,7 +522,7 @@ 

          session.krb_login.assert_called_once()

  

      @patch.dict("sys.modules", krbV=MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_build_weights_getLoggedInUser_failed(self, ClientSession):

          session = ClientSession.return_value

          session.getAverageBuildDuration.return_value = None
@@ -726,7 +720,7 @@ 

              ]

          assert session.createBuildTarget.mock_calls == expected_calls

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_built_rpms_in_module_build(self, ClientSession):

          session = ClientSession.return_value

          session.listTaggedRPMS.return_value = (
@@ -817,7 +811,7 @@ 

              ([], []),

          ),

      )

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_filtered_rpms_on_self_dep(

          self, ClientSession, br_filtered_rpms, expected

      ):
@@ -910,7 +904,7 @@ 

          else:

              mock_koji_cg.koji_import.assert_not_called()

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_anonymous_session(self, ClientSession):

          mbs_config = mock.Mock(koji_profile="koji", koji_config="conf/koji.conf")

          session = KojiModuleBuilder.get_session(mbs_config, login=False)
@@ -918,14 +912,14 @@ 

          assert ClientSession.return_value.krb_login.assert_not_called

  

      @patch.dict("sys.modules", krbV=MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_ensure_builder_use_a_logged_in_koji_session(self, ClientSession):

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

          builder = KojiModuleBuilder(db_session, "owner", module_build, conf, "module-tag", [])

          builder.koji_session.krb_login.assert_called_once()

  

      @patch.dict("sys.modules", krbV=MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_module_build_arches(self, ClientSession):

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

          arches = "x86_64 i686 ppc64le aarch64 s390x"
@@ -934,6 +928,35 @@ 

          ret = KojiModuleBuilder.get_module_build_arches(module_build)

          assert " ".join(ret) == arches

  

+     @patch.dict("sys.modules", krbV=MagicMock())

+     @patch("koji.ClientSession")

+     def test_get_module_build_arches_with_archless_tag(self, ClientSession):

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+         session = ClientSession.return_value

+         session.getTag.return_value = {"arches": ""}

+         ret = KojiModuleBuilder.get_module_build_arches(module_build)

+         assert ret == []

+ 

+     @patch.dict("sys.modules", krbV=MagicMock())

+     @patch("koji.ClientSession")

+     def test_get_module_build_arches_without_tag(self, ClientSession):

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+         module_build.koji_tag = None

+         session = ClientSession.return_value

+         ret = KojiModuleBuilder.get_module_build_arches(module_build)

+         assert ret == []

+         session.getTag.assert_not_called()

+         session.assert_not_called()

+ 

+     @patch.dict("sys.modules", krbV=MagicMock())

+     @patch("koji.ClientSession")

+     def test_get_module_build_arches_with_unknown_tag(self, ClientSession):

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

+         session = ClientSession.return_value

+         session.getTag.return_value = None

+         with pytest.raises(ValueError, match="Unknown Koji tag .*"):

+             KojiModuleBuilder.get_module_build_arches(module_build)

+ 

  

  class TestGetDistTagSRPM:

      """Test KojiModuleBuilder.get_disttag_srpm"""

file modified
+12 -13
@@ -8,7 +8,6 @@ 

  from os import path

  

  import module_build_service.messaging

- import module_build_service.scheduler.handlers.repos  # noqa

  from module_build_service import models, conf, build_logs, Modulemd

  from module_build_service.db_session import db_session

  from module_build_service.utils.general import mmd_to_str
@@ -73,11 +72,11 @@ 

          except OSError:

              pass

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      @patch("subprocess.Popen")

      @patch("module_build_service.builder.KojiContentGenerator.Modulemd")

      @patch("pkg_resources.get_distribution")

-     @patch("platform.linux_distribution")

+     @patch("distro.linux_distribution")

      @patch("platform.machine")

      @patch(

          "module_build_service.builder.KojiContentGenerator.KojiContentGenerator._koji_rpms_in_tag"
@@ -138,11 +137,11 @@ 

          # Ensure an anonymous Koji session works

          koji_session.krb_login.assert_not_called()

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      @patch("subprocess.Popen")

      @patch("module_build_service.builder.KojiContentGenerator.Modulemd")

      @patch("pkg_resources.get_distribution")

-     @patch("platform.linux_distribution")

+     @patch("distro.linux_distribution")

      @patch("platform.machine")

      @patch(

          "module_build_service.builder.KojiContentGenerator.KojiContentGenerator._koji_rpms_in_tag"
@@ -205,7 +204,7 @@ 

              assert len(mmd.read()) == 254

  

      @patch.dict("sys.modules", krbV=Mock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_tag_cg_build(self, ClientSession):

          """ Test that the CG build is tagged. """

          koji_session = ClientSession.return_value
@@ -221,7 +220,7 @@ 

          koji_session.krb_login.assert_called_once()

  

      @patch.dict("sys.modules", krbV=Mock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_tag_cg_build_fallback_to_default_tag(self, ClientSession):

          """ Test that the CG build is tagged to default tag. """

          koji_session = ClientSession.return_value
@@ -240,7 +239,7 @@ 

          koji_session.krb_login.assert_called_once()

  

      @patch.dict("sys.modules", krbV=Mock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_tag_cg_build_no_tag_set(self, ClientSession):

          """ Test that the CG build is not tagged when no tag set. """

          koji_session = ClientSession.return_value
@@ -255,7 +254,7 @@ 

          koji_session.krb_login.assert_called_once()

  

      @patch.dict("sys.modules", krbV=Mock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_tag_cg_build_no_tag_available(self, ClientSession):

          """ Test that the CG build is not tagged when no tag available. """

          koji_session = ClientSession.return_value
@@ -337,7 +336,7 @@ 

              "type": "file",

          }

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_koji_rpms_in_tag(self, ClientSession):

          koji_session = ClientSession.return_value

          koji_session.getUser.return_value = GET_USER_RV
@@ -427,7 +426,7 @@ 

          # Listing tagged RPMs does not require to log into a session

          koji_session.krb_login.assert_not_called()

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_koji_rpms_in_tag_empty_tag(self, ClientSession):

          koji_session = ClientSession.return_value

          koji_session.getUser.return_value = GET_USER_RV
@@ -439,7 +438,7 @@ 

          assert rpms == []

          koji_session.multiCall.assert_not_called()

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_koji_rpms_in_tag_empty_headers(self, ClientSession):

          koji_session = ClientSession.return_value

          koji_session.getUser.return_value = GET_USER_RV
@@ -966,7 +965,7 @@ 

              assert "%s:%s" % (mmd.get_module_name(), mmd.get_stream_name()) in requires

  

      @patch.dict("sys.modules", krbV=Mock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      @patch("module_build_service.builder.KojiContentGenerator.KojiContentGenerator._tag_cg_build")

      @patch("module_build_service.builder.KojiContentGenerator.KojiContentGenerator._load_koji_tag")

      def test_koji_cg_koji_import(self, tag_loader, tagger, cl_session):

file modified
+6 -6
@@ -149,7 +149,7 @@ 

          finally:

              app.config["SQLALCHEMY_DATABASE_URI"] = original_db_uri

  

-     @patch("module_build_service.scheduler.main")

+     @patch("module_build_service.scheduler.local.main")

      def test_set_stream(self, main):

          cli_cmd = [

              "mbs-manager", "build_module_locally",
@@ -159,9 +159,9 @@ 

  

          self._run_manager_wrapper(cli_cmd)

  

-         # Since module_build_service.scheduler.main is mocked, MBS does not

-         # really build the testmodule for this test. Following lines assert the

-         # fact:

+         # Since module_build_service.scheduler.local.main is mocked, MBS does

+         # not really build the testmodule for this test. Following lines assert

+         # the fact:

          # Module testmodule-local-build is expanded and stored into database,

          # and this build has buildrequires platform:f28 and requires

          # platform:f28.
@@ -201,7 +201,7 @@ 

              "--file", staged_data_filename("testmodule-local-build.yaml")

          ]

  

-         def main_side_effect(initial_messages, stop_condition):

+         def main_side_effect(module_build_ids):

              build = db_session.query(models.ModuleBuild).filter(

                  models.ModuleBuild.name == "testmodule-local-build"

              ).first()
@@ -211,7 +211,7 @@ 

          # We don't run consumer actually, but it could be patched to mark some

          # module build failed for test purpose.

  

-         with patch("module_build_service.scheduler.main",

+         with patch("module_build_service.scheduler.local.main",

                     side_effect=main_side_effect):

              with pytest.raises(RuntimeError, match="Module build failed"):

                  self._run_manager_wrapper(cli_cmd)

file modified
+13 -9
@@ -1,7 +1,8 @@ 

  # -*- coding: utf-8 -*-

  # SPDX-License-Identifier: MIT

+ 

  from module_build_service import messaging

- from module_build_service.messaging import KojiRepoChange  # noqa

+ from module_build_service.scheduler.parser import FedmsgMessageParser

  

  

  class TestFedmsgMessaging:
@@ -25,10 +26,11 @@ 

              "topic": "org.fedoraproject.prod.buildsys.build.state.change",

          }

  

-         msg = messaging.FedmsgMessageParser().parse(buildsys_state_change_msg)

+         parser = FedmsgMessageParser(messaging.known_fedmsg_services)

+         event_info = parser.parse(buildsys_state_change_msg)

  

-         assert msg.build_id == 614503

-         assert msg.build_new_state == 1

+         assert event_info["build_id"] == 614503

+         assert event_info["build_new_state"] == 1

  

      def test_buildsys_tag(self):

          # https://fedora-fedmsg.readthedocs.io/en/latest/topics.html#id134
@@ -49,10 +51,11 @@ 

              "topic": "org.fedoraproject.prod.buildsys.tag",

          }

  

-         msg = messaging.FedmsgMessageParser().parse(buildsys_tag_msg)

+         parser = FedmsgMessageParser(messaging.known_fedmsg_services)

+         event_info = parser.parse(buildsys_tag_msg)

  

-         assert msg.tag == "module-debugging-tools-master-20170405115403-build"

-         assert msg.artifact == "module-build-macros"

+         assert event_info["tag_name"] == "module-debugging-tools-master-20170405115403-build"

+         assert event_info["build_name"] == "module-build-macros"

  

      def test_buildsys_repo_done(self):

          # https://fedora-fedmsg.readthedocs.io/en/latest/topics.html#id134
@@ -68,6 +71,7 @@ 

              "topic": "org.fedoraproject.prod.buildsys.repo.done",

          }

  

-         msg = messaging.FedmsgMessageParser().parse(buildsys_tag_msg)

+         parser = FedmsgMessageParser(messaging.known_fedmsg_services)

+         event_info = parser.parse(buildsys_tag_msg)

  

-         assert msg.repo_tag == "module-f0f7e44f3c6cccab-build"

+         assert event_info["repo_tag"] == "module-f0f7e44f3c6cccab-build"

@@ -64,7 +64,7 @@ 

              "testmodule:master:20170109091357:7c29193d",

              "testmodule:master:20170109091357:7c29193e"}

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_buildrequired_modulemds_name_not_tagged(self, ClientSession):

          koji_session = ClientSession.return_value

          koji_session.getLastEvent.return_value = {"id": 123}
@@ -82,7 +82,7 @@ 

          koji_session.listTagged.assert_called_with(

              "foo-test", inherit=True, package="testmodule", type="module", event=123)

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_buildrequired_modulemds_multiple_streams(self, ClientSession):

          koji_session = ClientSession.return_value

  
@@ -108,7 +108,7 @@ 

          nsvcs = {m.get_nsvc() for m in result}

          assert nsvcs == {"testmodule:master:20170109091357:7c29193d"}

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_buildrequired_modulemds_tagged_but_not_in_db(self, ClientSession):

          koji_session = ClientSession.return_value

  
@@ -135,7 +135,7 @@ 

          with pytest.raises(ValueError, match=expected_error):

              resolver.get_buildrequired_modulemds("testmodule", "2", platform.mmd())

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_buildrequired_modulemds_multiple_versions_contexts(self, ClientSession):

          koji_session = ClientSession.return_value

  
@@ -172,7 +172,7 @@ 

              "testmodule:master:20170109091357:7c29193d",

              "testmodule:master:20170109091357:7c29193e"}

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_buildrequired_modules(self, ClientSession):

          koji_session = ClientSession.return_value

  
@@ -198,7 +198,7 @@ 

          nvrs = {m.nvr_string for m in result}

          assert nvrs == {"testmodule-master-20170109091357.7c29193d"}

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_filter_inherited(self, ClientSession):

          koji_session = ClientSession.return_value

  

@@ -1,8 +1,8 @@ 

  # -*- coding: utf-8 -*-

  # SPDX-License-Identifier: MIT

  from mock import patch, MagicMock

+ from module_build_service.scheduler import events

  from module_build_service.scheduler.consumer import MBSConsumer

- from module_build_service.messaging import KojiTagChange, KojiRepoChange

  

  

  class TestConsumer:
@@ -35,11 +35,11 @@ 

                  "release": "1.el7",

              },

          }

-         msg_obj = consumer.get_abstracted_msg(msg)

-         assert isinstance(msg_obj, KojiTagChange)

-         assert msg_obj.msg_id == msg["msg_id"]

-         assert msg_obj.tag == msg["msg"]["tag"]

-         assert msg_obj.artifact == msg["msg"]["name"]

+         event_info = consumer.get_abstracted_event_info(msg)

+         assert event_info["event"] == events.KOJI_TAG_CHANGE

+         assert event_info["msg_id"] == msg["msg_id"]

+         assert event_info["tag_name"] == msg["msg"]["tag"]

+         assert event_info["build_name"] == msg["msg"]["name"]

  

      @patch("module_build_service.scheduler.consumer.models")

      @patch.object(MBSConsumer, "process_message")
@@ -73,7 +73,7 @@ 

          }

          consumer.consume(msg)

          assert process_message.call_count == 1

-         msg_obj = process_message.call_args[0][0]

-         assert isinstance(msg_obj, KojiRepoChange)

-         assert msg_obj.msg_id == msg["body"]["msg_id"]

-         assert msg_obj.repo_tag == msg["body"]["msg"]["tag"]

+         event_info = process_message.call_args[0][0]

+         assert event_info["event"] == events.KOJI_REPO_CHANGE

+         assert event_info["msg_id"] == msg["body"]["msg_id"]

+         assert event_info["repo_tag"] == msg["body"]["msg"]["tag"]
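
The consumer no longer wraps messages in KojiTagChange/KojiRepoChange
objects; it passes plain dicts whose "event" key carries a constant from
module_build_service.scheduler.events. A sketch of the shape these asserts
rely on (any keys beyond the ones asserted above are assumptions):

    event_info = {
        "event": events.KOJI_REPO_CHANGE,  # or events.KOJI_TAG_CHANGE
        "msg_id": "some-fedmsg-id",
        "repo_tag": "module-testmodule-master-20170109091357-7c29193d-build",
    }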

@@ -52,7 +52,7 @@ 

          "python": "3",

          "ruby": "2.6",

      }

-     defaults_added = default_modules.add_default_modules(mmd, ["x86_64"])

+     defaults_added = default_modules.add_default_modules(mmd)

      # Make sure that the default modules were added. ruby:2.6 will be ignored since it's not in

      # the database

      assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"nodejs", "platform", "python"}
@@ -72,7 +72,7 @@ 

      clean_database()

      mmd = load_mmd(read_staged_data("formatted_testmodule.yaml"))

      assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"platform"}

-     default_modules.add_default_modules(mmd, ["x86_64"])

+     default_modules.add_default_modules(mmd)

      assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"platform"}

      mock_get_dm.assert_not_called()

  
@@ -88,7 +88,7 @@ 

  

      expected_error = "Failed to retrieve the module platform:f28:3:00000000 from the database"

      with pytest.raises(RuntimeError, match=expected_error):

-         default_modules.add_default_modules(mmd, ["x86_64"])

+         default_modules.add_default_modules(mmd)

  

  

  @patch("module_build_service.scheduler.default_modules._get_default_modules")
@@ -136,7 +136,7 @@ 

          "python": "3",

          "ruby": "2.6",

      }

-     defaults_added = default_modules.add_default_modules(mmd, ["x86_64"])

+     defaults_added = default_modules.add_default_modules(mmd)

      # Make sure that the default modules were added. ruby:2.6 will be ignored since it's not in

      # the database

      assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"nodejs", "platform"}
@@ -178,7 +178,7 @@ 

      mock_get_dm.side_effect = ValueError(expected_error)

  

      with pytest.raises(ValueError, match=expected_error):

-         default_modules.add_default_modules(mmd, ["x86_64"])

+         default_modules.add_default_modules(mmd)

  

  

  @pytest.mark.parametrize("is_rawhide", (True, False))
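
Every call site drops the arch-list argument: add_default_modules(mmd,
["x86_64"]) becomes add_default_modules(mmd). Presumably the arches are now
resolved internally from the buildrequired platform module; the hunks above
only show the signature change, so that is an assumption:

    # old (v2) call, removed in this diff:
    # defaults_added = default_modules.add_default_modules(mmd, ["x86_64"])
    # new (v3) call:
    defaults_added = default_modules.add_default_modules(mmd)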

@@ -2,9 +2,10 @@ 

  # SPDX-License-Identifier: MIT

  import pytest

  

- from mock import call, patch, Mock

+ from mock import call, patch, PropertyMock, Mock

  from sqlalchemy import func

  

+ import module_build_service.config

  from module_build_service import conf

  from module_build_service.db_session import db_session

  from module_build_service.models import BUILD_STATES, ModuleBuild
@@ -20,7 +21,7 @@ 

      def setup_method(self, method):

          clean_database()

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_module_build_nvr_does_not_exist_in_koji(self, ClientSession):

          ClientSession.return_value.getBuild.return_value = None

  
@@ -37,13 +38,13 @@ 

              {"extra": {"typeinfo": {"module": {}}}},

          ],

      )

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_cannot_find_module_build_id_from_build_info(self, ClientSession, build_info):

          ClientSession.return_value.getBuild.return_value = build_info

  

          assert get_corresponding_module_build("n-v-r") is None

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_corresponding_module_build_id_does_not_exist_in_db(self, ClientSession):

          fake_module_build_id, = db_session.query(func.max(ModuleBuild.id)).first()

  
@@ -53,7 +54,7 @@ 

  

          assert get_corresponding_module_build("n-v-r") is None

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_find_the_module_build(self, ClientSession):

          expected_module_build = (

              db_session.query(ModuleBuild).filter(ModuleBuild.name == "platform").first()
@@ -72,10 +73,27 @@ 

  class TestDecisionUpdateHandler:

      """Test handler decision_update"""

  

+     def setup_method(self, test_method):

+         self.patch_config_broker = patch.object(

+             module_build_service.config.Config,

+             "celery_broker_url",

+             create=True,

+             new_callable=PropertyMock,

+             return_value=False,

+         )

+         self.patch_config_broker.start()

+ 

+     def teardown_method(self, test_method):

+         self.patch_config_broker.stop()

+ 

      @patch("module_build_service.scheduler.handlers.greenwave.log")

      def test_decision_context_is_not_match(self, log):

-         msg = Mock(msg_id="msg-id-1", decision_context="bodhi_update_push_testing")

-         decision_update(conf, msg)

+         decision_update(

+             msg_id="msg-id-1",

+             decision_context="bodhi_update_push_testing",

+             policies_satisfied=True,

+             subject_identifier="xxx",

+         )

          log.debug.assert_called_once_with(

              'Skip Greenwave message %s as MBS only handles messages with the decision context "%s"',

              "msg-id-1",
@@ -84,20 +102,19 @@ 

  

      @patch("module_build_service.scheduler.handlers.greenwave.log")

      def test_not_satisfy_policies(self, log):

-         msg = Mock(

+         subject_identifier = "pkg-0.1-1.c1"

+         decision_update(

              msg_id="msg-id-1",

              decision_context="test_dec_context",

              policies_satisfied=False,

-             subject_identifier="pkg-0.1-1.c1",

-         )

-         decision_update(conf, msg)

+             subject_identifier=subject_identifier)

          log.debug.assert_called_once_with(

              "Skip to handle module build %s because it has not satisfied Greenwave policies.",

-             msg.subject_identifier,

+             subject_identifier,

          )

  

      @patch("module_build_service.messaging.publish")

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_transform_from_done_to_ready(self, ClientSession, publish):

          clean_database()
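
The new setup_method/teardown_method pair above stubs a config attribute for
the whole test class: patch.object with new_callable=PropertyMock makes the
attribute behave like a property, and create=True lets it be patched even if
Config does not define it. A condensed sketch of that pattern:

    patcher = patch.object(
        module_build_service.config.Config,
        "celery_broker_url",
        create=True,                # the attribute may not exist on Config
        new_callable=PropertyMock,
        return_value=False,
    )
    patcher.start()
    try:
        pass  # test body runs here; conf.celery_broker_url is now False
    finally:
        patcher.stop()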

  

@@ -12,6 +12,7 @@ 

  from module_build_service.db_session import db_session

  from module_build_service.models import ModuleBuild

  from module_build_service.utils.general import mmd_to_str, load_mmd

+ # from module_build_service.scheduler.events import MBSModule

  

  

  class TestModuleInit:
@@ -70,11 +71,7 @@ 

          platform_build.modulemd = mmd_to_str(mmd)

          db_session.commit()

  

-         msg = module_build_service.messaging.MBSModule(

-             msg_id=None, module_build_id=2, module_build_state="init"

-         )

- 

-         self.fn(config=conf, msg=msg)

+         self.fn(msg_id="msg-id-1", module_build_id=2, module_build_state="init")

  

          build = ModuleBuild.get_by_id(db_session, 2)

          # Make sure the module entered the wait state
@@ -114,9 +111,7 @@ 

              get_latest_error=RuntimeError("Failed in mocked_scm_get_latest")

          )

  

-         msg = module_build_service.messaging.MBSModule(

-             msg_id=None, module_build_id=2, module_build_state="init")

-         self.fn(config=conf, msg=msg)

+         self.fn(msg_id="msg-id-1", module_build_id=2, module_build_state="init")

  

          build = ModuleBuild.get_by_id(db_session, 2)

          # Make sure the module entered the failed state
@@ -141,9 +136,7 @@ 

          scmurl = "git://pkgs.domain.local/modules/includedmodule?#da95886"

          ModuleBuild.create(

              db_session, conf, "includemodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl")

-         msg = module_build_service.messaging.MBSModule(

-             msg_id=None, module_build_id=3, module_build_state="init")

-         self.fn(config=conf, msg=msg)

+         self.fn(msg_id="msg-id-1", module_build_id=3, module_build_state="init")

          build = ModuleBuild.get_by_id(db_session, 3)

          assert build.state == 1

          assert build.name == "includemodule"
@@ -177,12 +170,11 @@ 

              "7035bd33614972ac66559ac1fdd019ff6027ad22",

              get_latest_raise=True,

          )

-         msg = module_build_service.messaging.MBSModule(

-             msg_id=None, module_build_id=2, module_build_state="init")

+ 

          build = ModuleBuild.get_by_id(db_session, 2)

          mocked_from_module_event.return_value = build

  

-         self.fn(config=conf, msg=msg)

+         self.fn(msg_id="msg-id-1", module_build_id=2, module_build_state="init")

  

          # Query the database again to make sure the build object is updated

          db_session.refresh(build)
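
Like the Greenwave handler above, the module handlers lose the (config, msg)
pair and take keyword arguments directly. The handler here is reached through
self.fn, whose import path is not visible in these hunks, so the name below
is illustrative only:

    # hypothetical direct reference; the tests reach it via self.fn
    handler = module_build_service.scheduler.handlers.modules.init
    handler(msg_id="msg-id-1", module_build_id=2, module_build_state="init")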

@@ -22,7 +22,6 @@ 

  

          self.config = conf

          self.session = mock.Mock()

-         self.fn = module_build_service.scheduler.handlers.modules.wait

  

      def teardown_method(self, test_method):

          try:
@@ -40,12 +39,11 @@ 

          create_builder.return_value = builder

  

          module_build_id = db_session.query(ModuleBuild).first().id

-         msg = module_build_service.messaging.MBSModule(

-             msg_id=None,

-             module_build_id=module_build_id,

-             module_build_state="some state")

          with patch("module_build_service.resolver.GenericResolver.create"):

-             self.fn(config=self.config, msg=msg)

+             module_build_service.scheduler.handlers.modules.wait(

+                 msg_id="msg-id-1",

+                 module_build_id=module_build_id,

+                 module_build_state="some state")

  

      @patch(

          "module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -80,11 +78,10 @@ 

          resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

  

          generic_resolver.create.return_value = resolver

-         msg = module_build_service.messaging.MBSModule(

-             msg_id=None, module_build_id=2, module_build_state="some state")

  

          module_build_service.scheduler.handlers.modules.wait(

-             config=conf, msg=msg)

+             msg_id="msg-id-1",

+             module_build_id=2, module_build_state="some state")

  

          koji_session.newRepo.assert_called_once_with("module-123-build")

  
@@ -127,11 +124,11 @@ 

          resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

  

          generic_resolver.create.return_value = resolver

-         msg = module_build_service.messaging.MBSModule(

-             msg_id=None, module_build_id=2, module_build_state="some state")

  

          module_build_service.scheduler.handlers.modules.wait(

-             config=conf, msg=msg)

+             msg_id="msg-id-1",

+             module_build_id=2,

+             module_build_state="some state")

  

          assert koji_session.newRepo.called

  
@@ -173,11 +170,11 @@ 

          }

  

          generic_resolver.create.return_value = resolver

-         msg = module_build_service.messaging.MBSModule(

-             msg_id=None, module_build_id=2, module_build_state="some state")

  

          module_build_service.scheduler.handlers.modules.wait(

-             config=conf, msg=msg)

+             msg_id="msg-id-1",

+             module_build_id=2,

+             module_build_state="some state")

  

          module_build = ModuleBuild.get_by_id(db_session, 2)

          assert module_build.cg_build_koji_tag == "modular-updates-candidate"
@@ -244,11 +241,9 @@ 

              new=koji_cg_tag_build,

          ):

              generic_resolver.create.return_value = resolver

-             msg = module_build_service.messaging.MBSModule(

-                 msg_id=None, module_build_id=2, module_build_state="some state"

-             )

              module_build_service.scheduler.handlers.modules.wait(

-                 config=conf, msg=msg

-             )

+                 msg_id="msg-id-1",

+                 module_build_id=2,

+                 module_build_state="some state")

              module_build = ModuleBuild.get_by_id(db_session, 2)

              assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag

@@ -2,15 +2,13 @@ 

  # SPDX-License-Identifier: MIT

  import re

  import pytest

- from mock import patch

+ from mock import call, patch

  from module_build_service import models, conf

  from tests import clean_database, make_module_in_db

  import mock

  import koji

- from module_build_service.scheduler.producer import MBSProducer

- from module_build_service.messaging import KojiTagChange

  from module_build_service.db_session import db_session

- import six.moves.queue as queue

+ from module_build_service.scheduler import producer

  from datetime import datetime, timedelta
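
The producer rewrite shows up first in the imports: MBSProducer (a polling
class driven by a hub object) and the consumer queue plumbing disappear, and
the tests import the producer module and call its functions directly. A
sketch of the calling convention used throughout this file:

    from module_build_service.scheduler import producer

    producer.process_paused_module_builds()
    producer.retrigger_new_repo_on_failure()
    producer.delete_old_koji_targets()
    producer.cancel_stuck_module_builds()
    producer.poll_greenwave()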

  

  
@@ -19,7 +17,6 @@ 

      "module_build_service.builder.GenericBuilder.default_buildroot_groups",

      return_value={"build": [], "srpm-build": []},

  )

- @patch("module_build_service.scheduler.consumer.get_global_consumer")

  @patch("module_build_service.builder.GenericBuilder.create_from_module")

  class TestPoller:

      def setup_method(self, test_method):
@@ -40,15 +37,11 @@ 

      @pytest.mark.parametrize("fresh", [True, False])

      @patch("module_build_service.utils.batches.start_build_component")

      def test_process_paused_module_builds(

-         self, start_build_component, create_builder, global_consumer, dbg, fresh

+         self, start_build_component, create_builder, dbg, fresh

      ):

          """

          Tests the general use case of process_paused_module_builds.

          """

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

- 

          builder = mock.MagicMock()

          create_builder.return_value = builder

  
@@ -64,9 +57,7 @@ 

          db_session.commit()

  

          # Poll :)

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

-         poller.poll()

+         producer.process_paused_module_builds()

  

          module_build = models.ModuleBuild.get_by_id(db_session, 3)

  
@@ -92,16 +83,12 @@ 

      ))

      @patch("module_build_service.utils.batches.start_build_component")

      def test_process_paused_module_builds_with_new_repo_task(

-         self, start_build_component, create_builder, global_consumer, dbg, task_state,

+         self, start_build_component, create_builder, dbg, task_state,

          expect_start_build_component

      ):

          """

          Tests the general use case of process_paused_module_builds.

          """

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

- 

          builder = mock.MagicMock()

          create_builder.return_value = builder

  
@@ -118,9 +105,7 @@ 

          db_session.commit()

  

          # Poll :)

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

-         poller.poll()

+         producer.process_paused_module_builds()

  

          module_build = models.ModuleBuild.get_by_id(db_session, 3)

  
@@ -138,17 +123,11 @@ 

          assert len(start_build_component.mock_calls) == expected_build_calls

  

      @patch.dict("sys.modules", krbV=mock.MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_retrigger_new_repo_on_failure(

-         self, ClientSession, create_builder, global_consumer, dbg

-     ):

+     @patch("koji.ClientSession")

+     def test_retrigger_new_repo_on_failure(self, ClientSession, create_builder, dbg):

          """

          Tests that we call koji_session.newRepo when the newRepo task failed.

          """

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

- 

          koji_session = ClientSession.return_value

          koji_session.getTag = lambda tag_name: {"name": tag_name}

          koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["FAILED"]}
@@ -165,26 +144,18 @@ 

          module_build.new_repo_task_id = 123456

          db_session.commit()

  

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

-         poller.poll()

+         producer.retrigger_new_repo_on_failure()

  

          koji_session.newRepo.assert_called_once_with(

              "module-testmodule-master-20170219191323-c40c156c-build")

  

      @patch.dict("sys.modules", krbV=mock.MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_trigger_new_repo_when_succeeded(

-         self, ClientSession, create_builder, global_consumer, dbg

-     ):

+     @patch("koji.ClientSession")

+     def test_trigger_new_repo_when_succeeded(self, ClientSession, create_builder, dbg):

          """

          Tests that we do not call koji_sesion.newRepo when newRepo task

          succeeded.

          """

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

- 

          koji_session = ClientSession.return_value

          koji_session.getTag = lambda tag_name: {"name": tag_name}

          koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]}
@@ -201,26 +172,18 @@ 

          module_build.new_repo_task_id = 123456

          db_session.commit()

  

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

-         poller.poll()

+         producer.retrigger_new_repo_on_failure()

  

          module_build = models.ModuleBuild.get_by_id(db_session, 3)

  

          assert not koji_session.newRepo.called

          assert module_build.new_repo_task_id == 123456

  

-     def test_process_paused_module_builds_waiting_for_repo(

-         self, create_builder, global_consumer, dbg

-     ):

+     def test_process_paused_module_builds_waiting_for_repo(self, create_builder, dbg):

          """

          Tests that process_paused_module_builds does not start a new batch

          when we are waiting for the repo.

          """

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

- 

          builder = mock.MagicMock()

          create_builder.return_value = builder

  
@@ -232,9 +195,7 @@ 

          db_session.commit()

  

          # Poll :)

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

-         poller.poll()

+         producer.process_paused_module_builds()

  

          module_build = models.ModuleBuild.get_by_id(db_session, 3)

  
@@ -244,14 +205,10 @@ 

              assert component.state is None

  

      @patch.dict("sys.modules", krbV=mock.MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_old_build_targets_are_not_associated_with_any_module_builds(

-         self, ClientSession, create_builder, global_consumer, dbg

+         self, ClientSession, create_builder, dbg

      ):

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

- 

          koji_session = ClientSession.return_value

          # No created module build has any of these tags.

          koji_session.getBuildTargets.return_value = [
@@ -259,16 +216,14 @@ 

              {"dest_tag_name": "module-yyy-2"},

          ]

  

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

-         poller.delete_old_koji_targets(conf)

+         producer.delete_old_koji_targets()

  

          koji_session.deleteBuildTarget.assert_not_called()

  

      @patch.dict("sys.modules", krbV=mock.MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_dont_delete_base_module_build_target(

-         self, ClientSession, create_builder, global_consumer, dbg

+         self, ClientSession, create_builder, dbg

      ):

          module_build = models.ModuleBuild.get_by_id(db_session, 3)

  
@@ -276,24 +231,16 @@ 

          # No created module build has any of these tags.

          koji_session.getBuildTargets.return_value = [{"dest_tag_name": module_build.koji_tag}]

  

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

- 

          # If the module build's name is one of the base module names, the build target

          # should not be deleted.

          with patch.object(conf, "base_module_names", new=[module_build.name]):

- 

-             hub = mock.MagicMock()

-             poller = MBSProducer(hub)

-             poller.delete_old_koji_targets(conf)

- 

+             producer.delete_old_koji_targets()

              koji_session.deleteBuildTarget.assert_not_called()

  

      @patch.dict("sys.modules", krbV=mock.MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_dont_delete_build_target_for_unfinished_module_builds(

-         self, ClientSession, create_builder, global_consumer, dbg

+         self, ClientSession, create_builder, dbg

      ):

          module_build = models.ModuleBuild.get_by_id(db_session, 3)

  
@@ -301,26 +248,20 @@ 

          # No created module build has any of these tags.

          koji_session.getBuildTargets.return_value = [{"dest_tag_name": module_build.koji_tag}]

  

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

- 

          # Each time when a module build is in one of these state, build target

          # should not be deleted.

          for state in ["init", "wait", "build"]:

              module_build.state = state

              db_session.commit()

  

-             hub = mock.MagicMock()

-             poller = MBSProducer(hub)

-             poller.delete_old_koji_targets(conf)

+             producer.delete_old_koji_targets()

  

              koji_session.deleteBuildTarget.assert_not_called()

  

      @patch.dict("sys.modules", krbV=mock.MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_only_delete_build_target_with_allowed_koji_tag_prefix(

-         self, ClientSession, create_builder, global_consumer, dbg

+         self, ClientSession, create_builder, dbg

      ):

          module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)

          # Only module build 1's build target should be deleted.
@@ -343,23 +284,17 @@ 

              {"id": 2, "dest_tag_name": module_build_3.koji_tag, "name": module_build_3.koji_tag},

          ]

  

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

- 

          with patch.object(conf, "koji_tag_prefixes", new=["module", "another-prefix"]):

              with patch.object(conf, "koji_target_delete_time", new=60):

-                 hub = mock.MagicMock()

-                 poller = MBSProducer(hub)

-                 poller.delete_old_koji_targets(conf)

+                 producer.delete_old_koji_targets()

  

              koji_session.deleteBuildTarget.assert_called_once_with(1)

              koji_session.krb_login.assert_called_once()

  

      @patch.dict("sys.modules", krbV=mock.MagicMock())

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_cant_delete_build_target_if_not_reach_delete_time(

-         self, ClientSession, create_builder, global_consumer, dbg

+         self, ClientSession, create_builder, dbg

      ):

          module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)

          # Only module build 1's build target should be deleted.
@@ -377,31 +312,22 @@ 

              {"id": 1, "dest_tag_name": module_build_2.koji_tag, "name": module_build_2.koji_tag}

          ]

  

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

- 

          with patch.object(conf, "koji_tag_prefixes", new=["module"]):

              # Use default koji_target_delete_time in config. That time is long

              # enough for test.

-             hub = mock.MagicMock()

-             poller = MBSProducer(hub)

-             poller.delete_old_koji_targets(conf)

+             producer.delete_old_koji_targets()

  

              koji_session.deleteBuildTarget.assert_not_called()

  

      @pytest.mark.parametrize("state", ["init", "wait"])

-     def test_process_waiting_module_build(

-         self, create_builder, global_consumer, dbg, state

-     ):

+     @patch.dict(producer.ON_MODULE_CHANGE_HANDLERS, clear=True, values={

+         models.BUILD_STATES["init"]: mock.Mock(),

+         models.BUILD_STATES["wait"]: mock.Mock(),

+     })

+     def test_process_waiting_module_build(self, create_builder, dbg, state):

          """ Test that processing old waiting module builds works. """

  

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

- 

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

+         handler = producer.ON_MODULE_CHANGE_HANDLERS[models.BUILD_STATES[state]]

  

          # Change the batch to 2, so the module build is in state where

          # it is not building anything, but the state is "build".
@@ -413,32 +339,32 @@ 

          db_session.commit()

          db_session.refresh(module_build)

  

-         # Ensure the queue is empty before we start.

-         assert consumer.incoming.qsize() == 0

- 

          # Poll :)

-         poller.process_waiting_module_builds()

+         producer.process_waiting_module_builds()

  

-         assert consumer.incoming.qsize() == 1

+         handler.delay.assert_called_once_with(

+             "internal:mbs.module.state.change",

+             module_build.id,

+             module_build.state

+         )

  

          db_session.refresh(module_build)

          # ensure the time_modified was changed.

          assert module_build.time_modified > original

  

      @pytest.mark.parametrize("state", ["init", "wait"])

+     @patch.dict(producer.ON_MODULE_CHANGE_HANDLERS, clear=True, values={

+         models.BUILD_STATES["init"]: mock.Mock(),

+         models.BUILD_STATES["wait"]: mock.Mock(),

+     })

      def test_process_waiting_module_build_not_old_enough(

-         self, create_builder, global_consumer, dbg, state

+         self, create_builder, dbg, state

      ):

          """ Test that we do not process young waiting builds. """

  

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

+         handler = producer.ON_MODULE_CHANGE_HANDLERS[models.BUILD_STATES[state]]

  

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

- 

-         # Change the batch to 2, so the module build is in state where

+         # Change the batch to 2, so the module build is in a state where

          # it is not building anything, but the state is "build".

          module_build = models.ModuleBuild.get_by_id(db_session, 3)

          module_build.state = models.BUILD_STATES[state]
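
Where the old poller pushed messages onto the consumer's incoming queue (and
tests asserted on qsize()), state changes are now dispatched through
producer.ON_MODULE_CHANGE_HANDLERS, a dict mapping build states to handlers;
the tests patch that dict and assert on the handler's .delay() call. A
condensed sketch, assuming exactly one waiting module build as set up above:

    with patch.dict(producer.ON_MODULE_CHANGE_HANDLERS, clear=True, values={
        models.BUILD_STATES["wait"]: mock.Mock(),
    }):
        handler = producer.ON_MODULE_CHANGE_HANDLERS[models.BUILD_STATES["wait"]]
        producer.process_waiting_module_builds()
        handler.delay.assert_called_once_with(
            "internal:mbs.module.state.change",
            module_build.id,
            module_build.state,
        )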
@@ -448,37 +374,25 @@ 

          db_session.commit()

          db_session.refresh(module_build)

  

-         # Ensure the queue is empty before we start.

-         assert consumer.incoming.qsize() == 0

- 

          # Poll :)

-         poller.process_waiting_module_builds()

+         producer.process_waiting_module_builds()

  

-         # Ensure we did *not* process the 9 minute-old build.

-         assert consumer.incoming.qsize() == 0

+         handler.assert_not_called()

  

-     def test_process_waiting_module_build_none_found(

-         self, create_builder, global_consumer, dbg

-     ):

+     @patch.dict(producer.ON_MODULE_CHANGE_HANDLERS, clear=True, values={

+         models.BUILD_STATES["init"]: mock.Mock(),

+         models.BUILD_STATES["wait"]: mock.Mock(),

+     })

+     def test_process_waiting_module_build_none_found(self, create_builder, dbg):

          """ Test nothing happens when no module builds are waiting. """

- 

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

- 

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

- 

-         # Ensure the queue is empty before we start.

-         assert consumer.incoming.qsize() == 0

- 

          # Poll :)

-         poller.process_waiting_module_builds()

+         producer.process_waiting_module_builds()

  

          # Ensure we did *not* process any of the non-waiting builds.

-         assert consumer.incoming.qsize() == 0

+         for handler in producer.ON_MODULE_CHANGE_HANDLERS.values():

+             handler.assert_not_called()

  

-     def test_cleanup_stale_failed_builds(self, create_builder, global_consumer, dbg):

+     def test_cleanup_stale_failed_builds(self, create_builder, dbg):

          """ Test that one of the two module builds gets to the garbage state when running

          cleanup_stale_failed_builds.

          """
@@ -502,15 +416,8 @@ 

  

          db_session.commit()

  

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

+         producer.cleanup_stale_failed_builds()

  

-         # Ensure the queue is empty before we start

-         assert consumer.incoming.qsize() == 0

-         poller.cleanup_stale_failed_builds(conf)

          db_session.refresh(module_build_two)

          # Make sure module_build_one was transitioned to garbage

          assert module_build_one.state == models.BUILD_STATES["garbage"]
@@ -533,9 +440,7 @@ 

              "module-build-macros-0.1-1.module+0+d027b723",

          ])

  

-     def test_cleanup_stale_failed_builds_no_components(

-         self, create_builder, global_consumer, dbg

-     ):

+     def test_cleanup_stale_failed_builds_no_components(self, create_builder, dbg):

          """ Test that a module build without any components built gets to the garbage state when

          running cleanup_stale_failed_builds.

          """
@@ -555,15 +460,8 @@ 

  

          db_session.commit()

  

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

+         producer.cleanup_stale_failed_builds()

  

-         # Ensure the queue is empty before we start

-         assert consumer.incoming.qsize() == 0

-         poller.cleanup_stale_failed_builds(conf)

          db_session.refresh(module_build_two)

          # Make sure module_build_two was transitioned to garbage

          assert module_build_two.state == models.BUILD_STATES["garbage"]
@@ -580,9 +478,7 @@ 

      @pytest.mark.parametrize(

          "test_state", [models.BUILD_STATES[state] for state in conf.cleanup_stuck_builds_states]

      )

-     def test_cancel_stuck_module_builds(

-         self, create_builder, global_consumer, dbg, test_state

-     ):

+     def test_cancel_stuck_module_builds(self, create_builder, dbg, test_state):

  

          module_build1 = models.ModuleBuild.get_by_id(db_session, 1)

          module_build1.state = test_state
@@ -601,26 +497,20 @@ 

  

          db_session.commit()

  

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

- 

-         assert consumer.incoming.qsize() == 0

- 

-         poller.cancel_stuck_module_builds(conf)

+         producer.cancel_stuck_module_builds()

  

-         module = db_session.query(models.ModuleBuild).filter_by(state=4).all()

+         module = models.ModuleBuild.by_state(db_session, "failed")

          assert len(module) == 1

          assert module[0].id == 2
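
A small query cleanup rides along here: the raw filter on the numeric state
column is replaced by the ModuleBuild.by_state helper, which takes the state
name instead:

    failed = models.ModuleBuild.by_state(db_session, "failed")
    # previously: db_session.query(models.ModuleBuild).filter_by(state=4).all()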

  

      @pytest.mark.parametrize("tagged", (True, False))

      @pytest.mark.parametrize("tagged_in_final", (True, False))

      @pytest.mark.parametrize("btime", (True, False))

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

+     @patch("module_build_service.scheduler.producer.tagged")

      def test_sync_koji_build_tags(

-         self, ClientSession, create_builder, global_consumer, dbg, tagged, tagged_in_final, btime

+         self, tagged_handler, ClientSession, create_builder, dbg,

+         tagged, tagged_in_final, btime

      ):

          module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)

          # Only module build 1's build target should be deleted.
@@ -639,48 +529,35 @@ 

  

          koji_session = ClientSession.return_value

          # No created module build has any of these tags.

-         ret = []

  

-         if btime:

-             if tagged:

-                 ret.append({"id": 1, "name": module_build_2.koji_tag + "-build"})

-             if tagged_in_final:

-                 ret.append({"id": 2, "name": module_build_2.koji_tag})

-         koji_session.listTags.return_value = ret

- 

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

- 

-         assert consumer.incoming.qsize() == 0

+         listtags_return_value = []

+         expected_tagged_calls = []

  

-         poller.sync_koji_build_tags(conf)

- 

-         assert consumer.incoming.qsize() == len(ret)

- 

-         expected_msg_tags = []

          if btime:

              if tagged:

-                 expected_msg_tags.append(module_build_2.koji_tag + "-build")

+                 listtags_return_value.append(

+                     {"id": 1, "name": module_build_2.koji_tag + "-build"})

+                 expected_tagged_calls.append(call(

+                     "internal:sync_koji_build_tags",

+                     module_build_2.koji_tag + "-build", c.package, c.nvr

+                 ))

              if tagged_in_final:

-                 expected_msg_tags.append(module_build_2.koji_tag)

+                 listtags_return_value.append(

+                     {"id": 2, "name": module_build_2.koji_tag})

+                 expected_tagged_calls.append(call(

+                     "internal:sync_koji_build_tags",

+                     module_build_2.koji_tag, c.package, c.nvr

+                 ))

+         koji_session.listTags.return_value = listtags_return_value

  

-         assert len(expected_msg_tags) == consumer.incoming.qsize()

+         producer.sync_koji_build_tags()

  

-         for i in range(consumer.incoming.qsize()):

-             msg = consumer.incoming.get()

-             assert isinstance(msg, KojiTagChange)

-             assert msg.artifact == c.package

-             assert msg.nvr == c.nvr

-             assert msg.tag in expected_msg_tags

+         tagged_handler.delay.assert_has_calls(

+             expected_tagged_calls, any_order=True)

  

      @pytest.mark.parametrize("greenwave_result", [True, False])

      @patch("module_build_service.utils.greenwave.Greenwave.check_gating")

-     def test_poll_greenwave(

-         self, mock_gw, create_builder, global_consumer, dbg, greenwave_result

-     ):

+     def test_poll_greenwave(self, mock_gw, create_builder, dbg, greenwave_result):

  

          module_build1 = models.ModuleBuild.get_by_id(db_session, 1)

          module_build1.state = models.BUILD_STATES["ready"]
@@ -697,17 +574,9 @@ 

  

          db_session.commit()

  

-         consumer = mock.MagicMock()

-         consumer.incoming = queue.Queue()

-         global_consumer.return_value = consumer

-         hub = mock.MagicMock()

-         poller = MBSProducer(hub)

- 

-         assert consumer.incoming.qsize() == 0

- 

          mock_gw.return_value = greenwave_result

  

-         poller.poll_greenwave(conf)

+         producer.poll_greenwave()

  

          mock_gw.assert_called_once()

          modules = models.ModuleBuild.by_state(db_session, "ready")
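
sync_koji_build_tags() follows the same pattern: instead of queueing
KojiTagChange messages for the consumer, it calls the tags handler's .delay()
with an internal event id. The test patches
module_build_service.scheduler.producer.tagged and asserts on the calls; a
sketch of one expected invocation:

    tagged.delay(
        "internal:sync_koji_build_tags",
        module_build_2.koji_tag + "-build",  # or the final tag, without "-build"
        c.package,
        c.nvr,
    )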

@@ -7,21 +7,21 @@ 

  import module_build_service.models

  from module_build_service.db_session import db_session

  from module_build_service.models import ComponentBuild

- from tests import conf, scheduler_init_data

+ from tests import scheduler_init_data

  

  

  class TestRepoDone:

  

-     @mock.patch("module_build_service.models.ModuleBuild.from_repo_done_event")

-     def test_no_match(self, from_repo_done_event):

+     @mock.patch("module_build_service.models.ModuleBuild.get_by_tag")

+     def test_no_match(self, get_by_tag):

          """ Test that when a repo msg hits us and we have no match,

          that we do nothing gracefully.

          """

          scheduler_init_data()

-         from_repo_done_event.return_value = None

-         msg = module_build_service.messaging.KojiRepoChange(

-             "no matches for this...", "2016-some-nonexistent-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)

+         get_by_tag.return_value = None

+         module_build_service.scheduler.handlers.repos.done(

+             msg_id="no matches for this...",

+             repo_tag="2016-some-nonexistent-build")

  

      @mock.patch(

          "module_build_service.builder.KojiModuleBuilder."
@@ -56,9 +56,9 @@ 

          get_session.return_value = mock.Mock(), "development"

          build_fn.return_value = 1234, 1, "", None

  

-         msg = module_build_service.messaging.KojiRepoChange(

-             "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(

+             msg_id="some_msg_id",

+             repo_tag="module-testmodule-master-20170109091357-7c29193d-build")

          build_fn.assert_called_once_with(

              artifact_name="tangerine",

              source=(
@@ -116,9 +116,9 @@ 

  

          finalizer.side_effect = mocked_finalizer

  

-         msg = module_build_service.messaging.KojiRepoChange(

-             "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(

+             msg_id="some_msg_id",

+             repo_tag="module-testmodule-master-20170109091357-7c29193d-build")

  

          finalizer.assert_called_once()

  
@@ -156,9 +156,10 @@ 

          config.return_value = mock.Mock(), "development"

          build_fn.return_value = None, 4, "Failed to submit artifact tangerine to Koji", None

  

-         msg = module_build_service.messaging.KojiRepoChange(

-             "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(

+             msg_id="some_msg_id",

+             repo_tag="module-testmodule-master-20170109091357-7c29193d-build")

+ 

          build_fn.assert_called_once_with(

              artifact_name="tangerine",

              source=(
@@ -182,10 +183,9 @@ 

          component_build.tagged = False

          db_session.commit()

  

-         msg = module_build_service.messaging.KojiRepoChange(

-             "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

- 

-         module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(

+             msg_id="some_msg_id",

+             repo_tag="module-testmodule-master-20170109091357-7c29193d-build")

  

          mock_log_info.assert_called_with(

              "Ignoring repo regen, because not all components are tagged."
@@ -222,9 +222,9 @@ 

          config.return_value = mock.Mock(), "development"

          build_fn.return_value = None, 4, "Failed to submit artifact x to Koji", None

  

-         msg = module_build_service.messaging.KojiRepoChange(

-             "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(

+             msg_id="some_msg_id",

+             repo_tag="module-testmodule-master-20170109091357-7c29193d-build")

  

          module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

          assert module_build.state == module_build_service.models.BUILD_STATES["failed"]
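
The repo handler follows suit: ModuleBuild.from_repo_done_event becomes
ModuleBuild.get_by_tag (which is what these tests patch), and repos.done()
takes msg_id and repo_tag keywords instead of a KojiRepoChange object:

    module_build_service.scheduler.handlers.repos.done(
        msg_id="some_msg_id",
        repo_tag="module-testmodule-master-20170109091357-7c29193d-build",
    )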

@@ -9,7 +9,7 @@ 

  import module_build_service.scheduler.handlers.repos

  import module_build_service.scheduler.handlers.tags

  import module_build_service.models

- from tests import conf

+ 

  from module_build_service.db_session import db_session

  

  import koji
@@ -18,29 +18,28 @@ 

  @pytest.mark.usefixtures("reuse_component_init_data")

  class TestTagTagged:

  

-     @mock.patch("module_build_service.models.ModuleBuild.from_tag_change_event")

-     def test_no_matching_module(self, from_tag_change_event):

+     @mock.patch("module_build_service.models.ModuleBuild.get_by_tag")

+     def test_no_matching_module(self, get_by_tag):

          """ Test that when a tag msg hits us and we have no match,

          that we do nothing gracefully.

          """

-         from_tag_change_event.return_value = None

-         msg = module_build_service.messaging.KojiTagChange(

-             "no matches for this...", "2016-some-nonexistent-build", "artifact", "artifact-1.2-1")

+         get_by_tag.return_value = None

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

+             msg_id="no matches for this...",

+             tag_name="2016-some-nonexistent-build",

+             build_name="artifact",

+             build_nvr="artifact-1.2-1")

  

      def test_no_matching_artifact(self):

          """ Test that when a tag msg hits us and we have no match,

          that we do nothing gracefully.

          """

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c-build",

-             "artifact",

-             "artifact-1.2-1",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c-build",

+             build_name="artifact",

+             build_nvr="artifact-1.2-1",

+         )
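
The tag handler gets the same treatment: KojiTagChange message objects are
gone, and tags.tagged() takes the fields as keywords. The remaining hunks in
this file are this same mechanical rewrite:

    module_build_service.scheduler.handlers.tags.tagged(
        msg_id="id",
        tag_name="module-testmodule-master-20170219191323-c40c156c-build",
        build_name="perl-Tangerine",
        build_nvr="perl-Tangerine-0.23-1.module+0+d027b723",
    )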

  

      @patch(

          "module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -86,24 +85,18 @@ 

          db_session.commit()

  

          # Tag the first component to the buildroot.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c-build",

-             "perl-Tangerine",

-             "perl-Tangerine-0.23-1.module+0+d027b723",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c-build",

+             build_name="perl-Tangerine",

+             build_nvr="perl-Tangerine-0.23-1.module+0+d027b723",

          )

          # Tag the first component to the final tag.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c",

-             "perl-Tangerine",

-             "perl-Tangerine-0.23-1.module+0+d027b723",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c",

+             build_name="perl-Tangerine",

+             build_nvr="perl-Tangerine-0.23-1.module+0+d027b723",

          )

  

          # newRepo should not be called, because there are still components
@@ -111,14 +104,11 @@ 

          assert not koji_session.newRepo.called

  

          # Tag the second component to the buildroot.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c-build",

-             "perl-List-Compare",

-             "perl-List-Compare-0.53-5.module+0+d027b723",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c-build",

+             build_name="perl-List-Compare",

+             build_nvr="perl-List-Compare-0.53-5.module+0+d027b723",

          )

  

          # newRepo should not be called, because the component has not been
@@ -126,14 +116,12 @@ 

          assert not koji_session.newRepo.called

  

          # Tag the first component to the final tag.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c",

-             "perl-List-Compare",

-             "perl-List-Compare-0.53-5.module+0+d027b723",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c",

+             build_name="perl-List-Compare",

+             build_nvr="perl-List-Compare-0.53-5.module+0+d027b723",

+         )

  

          # newRepo should be called now - all components have been tagged.

          koji_session.newRepo.assert_called_once_with(
@@ -182,23 +170,19 @@ 

          db_session.commit()

  

          # Tag the perl-Tangerine component to the buildroot.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c-build",

-             "perl-Tangerine",

-             "perl-Tangerine-0.23-1.module+0+d027b723",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

-         # Tag the perl-List-Compare component to final tag.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c",

-             "perl-Tangerine",

-             "perl-Tangerine-0.23-1.module+0+d027b723",

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c-build",

+             build_name="perl-Tangerine",

+             build_nvr="perl-Tangerine-0.23-1.module+0+d027b723",

          )

+         # Tag the perl-Tangerine component to the final tag.

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c",

+             build_name="perl-Tangerine",

+             build_nvr="perl-Tangerine-0.23-1.module+0+d027b723",

+         )

  

          # newRepo should not be called, because perl-List-Compare has not been

          # built yet.
@@ -252,24 +236,19 @@ 

          db_session.commit()

  

          # Tag the perl-List-Compare component to the buildroot.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c-build",

-             "perl-List-Compare",

-             "perl-List-Compare-0.53-5.module+0+d027b723",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c-build",

+             build_name="perl-List-Compare",

+             build_nvr="perl-List-Compare-0.53-5.module+0+d027b723",

          )

          # Tag the perl-List-Compare component to final tag.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c",

-             "perl-List-Compare",

-             "perl-List-Compare-0.53-5.module+0+d027b723",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c",

+             build_name="perl-List-Compare",

+             build_nvr="perl-List-Compare-0.53-5.module+0+d027b723",

+         )

  

          # newRepo should be called now - all successfully built

          # components have been tagged.
@@ -328,69 +307,57 @@ 

          db_session.commit()

  

          # Tag the first component to the buildroot.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c-build",

-             "perl-Tangerine",

-             "perl-Tangerine-0.23-1.module+0+d027b723",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

-         # Tag the first component to the final tag.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c",

-             "perl-Tangerine",

-             "perl-Tangerine-0.23-1.module+0+d027b723",

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c-build",

+             build_name="perl-Tangerine",

+             build_nvr="perl-Tangerine-0.23-1.module+0+d027b723",

          )

+         # Tag the first component to the final tag.

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c",

+             build_name="perl-Tangerine",

+             build_nvr="perl-Tangerine-0.23-1.module+0+d027b723",

+         )

  

          # newRepo should not be called, because there are still components

          # to tag.

          assert not koji_session.newRepo.called

  

          # Tag the second component to the buildroot.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c-build",

-             "perl-List-Compare",

-             "perl-List-Compare-0.53-5.module+0+d027b723",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

-         # Tag the second component to final tag.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c",

-             "perl-List-Compare",

-             "perl-List-Compare-0.53-5.module+0+d027b723",

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c-build",

+             build_name="perl-List-Compare",

+             build_nvr="perl-List-Compare-0.53-5.module+0+d027b723",

          )

+         # Tag the second component to final tag.

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c",

+             build_name="perl-List-Compare",

+             build_nvr="perl-List-Compare-0.53-5.module+0+d027b723",

+         )

  

          # newRepo should not be called, because there are still components

          # to tag.

          assert not koji_session.newRepo.called

  

          # Tag the component from first batch to final tag.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c",

-             "module-build-macros",

-             "module-build-macros-0.1-1.module+0+b0a1d1f7",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

-         # Tag the component from first batch to the buildroot.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c-build",

-             "module-build-macros",

-             "module-build-macros-0.1-1.module+0+b0a1d1f7",

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c",

+             build_name="module-build-macros",

+             build_nvr="module-build-macros-0.1-1.module+0+b0a1d1f7",

          )

+         # Tag the component from first batch to the buildroot.

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c-build",

+             build_name="module-build-macros",

+             build_nvr="module-build-macros-0.1-1.module+0+b0a1d1f7",

+         )

  

          # newRepo should be called now - all components have been tagged.

          koji_session.newRepo.assert_called_once_with(
@@ -455,33 +422,27 @@ 

          db_session.commit()

  

          # Tag the perl-Tangerine component to the buildroot.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c-build",

-             "perl-Tangerine",

-             "perl-Tangerine-0.23-1.module+0+d027b723",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c-build",

+             build_name="perl-Tangerine",

+             build_nvr="perl-Tangerine-0.23-1.module+0+d027b723",

+         )

          assert not koji_session.newRepo.called

          # Tag the perl-List-Compare component to the buildroot.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c-build",

-             "perl-List-Compare",

-             "perl-List-Compare-0.53-5.module+0+d027b723",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

-         # Tag the perl-List-Compare component to final tag.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c",

-             "perl-List-Compare",

-             "perl-List-Compare-0.53-5.module+0+d027b723",

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c-build",

+             build_name="perl-List-Compare",

+             build_nvr="perl-List-Compare-0.53-5.module+0+d027b723",

          )

+         # Tag the perl-List-Compare component to final tag.

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c",

+             build_name="perl-List-Compare",

+             build_nvr="perl-List-Compare-0.53-5.module+0+d027b723",

+         )

  

          # newRepo should be called now - all successfully built

          # components have been tagged.
@@ -554,41 +515,33 @@ 

          db_session.commit()

  

          # Tag the first component to the buildroot.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c-build",

-             "perl-Tangerine",

-             "perl-Tangerine-0.23-1.module+0+d027b723",

-         )

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

-         # Tag the first component to the final tag.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c",

-             "perl-Tangerine",

-             "perl-Tangerine-0.23-1.module+0+d027b723",

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c-build",

+             build_name="perl-Tangerine",

+             build_nvr="perl-Tangerine-0.23-1.module+0+d027b723",

          )

+         # Tag the first component to the final tag.

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

-         # Tag the second component to the buildroot.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c-build",

-             "perl-List-Compare",

-             "perl-List-Compare-0.53-5.module+0+d027b723",

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c",

+             build_name="perl-Tangerine",

+             build_nvr="perl-Tangerine-0.23-1.module+0+d027b723",

          )

+         # Tag the second component to the buildroot.

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

-         # Tag the second component to the final tag.

-         msg = module_build_service.messaging.KojiTagChange(

-             "id",

-             "module-testmodule-master-20170219191323-c40c156c",

-             "perl-List-Compare",

-             "perl-List-Compare-0.53-5.module+0+d027b723",

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c-build",

+             build_name="perl-List-Compare",

+             build_nvr="perl-List-Compare-0.53-5.module+0+d027b723",

          )

+         # Tag the second component to the final tag.

          module_build_service.scheduler.handlers.tags.tagged(

-             config=conf, msg=msg)

+             msg_id="id",

+             tag_name="module-testmodule-master-20170219191323-c40c156c",

+             build_name="perl-List-Compare",

+             build_nvr="perl-List-Compare-0.53-5.module+0+d027b723",

+         )

  

          # All components are tagged, newRepo should be called if there are no active tasks.

          if expect_new_repo:

file modified
+3 -3
@@ -111,7 +111,7 @@ 

      def teardown_method(self, test_method):

          clean_database()

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_return_empty_if_no_ursine_build_tag_is_found(self, ClientSession):

          koji_session = ClientSession.return_value

  
@@ -127,7 +127,7 @@ 

          assert [] == modulemds

  

      @patch.object(conf, "koji_tag_prefixes", new=["module"])

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_modulemds(self, ClientSession):

          koji_session = ClientSession.return_value

  
@@ -235,7 +235,7 @@ 

      @patch.object(conf, "base_module_names", new=["platform", "project-platform"])

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

      @patch("module_build_service.resolver.GenericResolver.create")

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_add_collision_modules(

          self, ClientSession, resolver_create, get_modulemds_from_ursine_content

      ):

file modified
+149 -41
@@ -29,8 +29,9 @@ 

  import pytest

  import module_build_service.scheduler.handlers.components

  from module_build_service.db_session import db_session

- from module_build_service.builder.base import GenericBuilder

+ from module_build_service.builder import GenericBuilder

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

+ from module_build_service.scheduler import events

  from module_build_service import Modulemd

  from tests import app

  
@@ -163,6 +164,86 @@ 

          tangerine = get_reusable_component(second_module_build, "tangerine")

          assert bool(tangerine is None) != bool(set_current_arch == set_database_arch)

  

+     @pytest.mark.parametrize(

+         "reuse_component",

+         ["perl-Tangerine", "perl-List-Compare", "tangerine"])

+     @pytest.mark.parametrize(

+         "changed_component",

+         ["perl-Tangerine", "perl-List-Compare", "tangerine"])

+     def test_get_reusable_component_different_batch(

+         self, changed_component, reuse_component

+     ):

+         """

+         Test that we get the correct reuse behavior for the changed-and-after strategy. Changes

+         to earlier batches should prevent reuse, but changes to later batches should not.

+         For context, see https://pagure.io/fm-orchestrator/issue/1298

+         """

+ 

+         if changed_component == reuse_component:

+             # we're only testing the cases where these are different

+             # this case is already covered by test_get_reusable_component_different_component

+             return

+ 

+         second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

+ 

+         # update batch for changed component

+         changed_component = models.ComponentBuild.from_component_name(

+             db_session, changed_component, second_module_build.id)

+         orig_batch = changed_component.batch

+         changed_component.batch = orig_batch + 1

+         db_session.commit()

+ 

+         reuse_component = models.ComponentBuild.from_component_name(

+             db_session, reuse_component, second_module_build.id)

+ 

+         reuse_result = module_build_service.utils.get_reusable_component(

+             second_module_build, reuse_component.package)

+         # Component reuse should only be blocked when an earlier batch has been changed.

+         # In this case, orig_batch is the earliest batch that has been changed (the changed

+         # component has been removed from it and added to the following one).

+         assert bool(reuse_result is None) == bool(reuse_component.batch > orig_batch)

+ 

+     @pytest.mark.parametrize(

+         "reuse_component",

+         ["perl-Tangerine", "perl-List-Compare", "tangerine"])

+     @pytest.mark.parametrize(

+         "changed_component",

+         ["perl-Tangerine", "perl-List-Compare", "tangerine"])

+     def test_get_reusable_component_different_arch_in_batch(

+         self, changed_component, reuse_component

+     ):

+         """

+         Test that we get the correct reuse behavior for the changed-and-after strategy. Changes

+         to the architectures in earlier batches should prevent reuse, but such changes to later

+         batches should not.

+         For context, see https://pagure.io/fm-orchestrator/issue/1298

+         """

+         if changed_component == reuse_component:

+             # we're only testing the cases where these are different

+             # this case is already covered by test_get_reusable_component_different_arches

+             return

+ 

+         second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

+ 

+         # update arch for changed component

+         mmd = second_module_build.mmd()

+         component = mmd.get_rpm_component(changed_component)

+         component.reset_arches()

+         component.add_restricted_arch("i686")

+         second_module_build.modulemd = mmd_to_str(mmd)

+         db_session.commit()

+ 

+         changed_component = models.ComponentBuild.from_component_name(

+             db_session, changed_component, second_module_build.id)

+         reuse_component = models.ComponentBuild.from_component_name(

+             db_session, reuse_component, second_module_build.id)

+ 

+         reuse_result = module_build_service.utils.get_reusable_component(

+             second_module_build, reuse_component.package)

+         # Changing the arch of a component should prevent reuse only when the changed component

+         # is in a batch earlier than the component being considered for reuse.

+         assert bool(reuse_result is None) == bool(reuse_component.batch > changed_component.batch)

+ 

      @pytest.mark.parametrize("rebuild_strategy", models.ModuleBuild.rebuild_strategies.keys())

      def test_get_reusable_component_different_buildrequires_stream(self, rebuild_strategy):

          first_module_build = models.ModuleBuild.get_by_id(db_session, 2)
@@ -264,7 +345,7 @@ 

      def teardown_method(self, test_method):

          clean_database()

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_build_arches(self, ClientSession):

          session = ClientSession.return_value

          session.getTag.return_value = {"arches": "ppc64le"}
@@ -272,7 +353,7 @@ 

          r = module_build_service.utils.get_build_arches(mmd, conf)

          assert r == ["ppc64le"]

  

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_get_build_arches_no_arch_set(self, ClientSession):

          """

          When no architecture is set in Koji tag, fallback to conf.arches.
@@ -1117,11 +1198,13 @@ 

  class TestBatches:

      def setup_method(self, test_method):

          GenericBuilder.register_backend_class(DummyModuleBuilder)

+         events.scheduler.reset()

  

      def teardown_method(self, test_method):

          # clean_database()

          DummyModuleBuilder.TAGGED_COMPONENTS = []

          GenericBuilder.register_backend_class(KojiModuleBuilder)

+         events.scheduler.reset()

  

      def test_start_next_batch_build_reuse(self, default_buildroot_groups):

          """
@@ -1137,31 +1220,30 @@ 

          module_build.batch = 1

  

          builder = mock.MagicMock()

-         further_work = module_build_service.utils.start_next_batch_build(

+         builder.module_build_tag = {"name": "module-fedora-27-build"}

+         module_build_service.utils.start_next_batch_build(

              conf, module_build, builder)

  

          # Batch number should increase.

          assert module_build.batch == 2

  

-         # KojiBuildChange messages in further_work should have build_new_state

-         # set to COMPLETE, but the current component build state should be set

-         # to BUILDING, so KojiBuildChange message handler handles the change

-         # properly.

-         for msg in further_work:

-             if type(msg) == module_build_service.messaging.KojiBuildChange:

-                 assert msg.build_new_state == koji.BUILD_STATES["COMPLETE"]

-                 component_build = models.ComponentBuild.from_component_event(db_session, msg)

+         # buildsys.build.state.change events on the scheduler queue should

+         # have build_new_state set to COMPLETE, but the current component

+         # build state should be set to BUILDING, so the build state change

+         # handler handles the change properly.

+         for event in events.scheduler.queue:

+             event_info = event[3]

+             if event_info[0].startswith("reuse_component"):

+                 assert event_info[3] == koji.BUILD_STATES["COMPLETE"]

+                 component_build = models.ComponentBuild.from_component_event(

+                     db_session,

+                     task_id=event_info[2],

+                     module_id=event_info[7])

                  assert component_build.state == koji.BUILD_STATES["BUILDING"]

  

          # When we handle these KojiBuildChange messages, MBS should tag all

          # the components just once.

-         for msg in further_work:

-             if type(msg) == module_build_service.messaging.KojiBuildChange:

-                 module_build_service.scheduler.handlers.components.complete(conf, msg)

- 

-         # Since we have reused all the components in the batch, there should

-         # be fake KojiRepoChange message.

-         assert type(further_work[-1]) == module_build_service.messaging.KojiRepoChange

+         events.scheduler.run()

  

          # Check that packages have been tagged just once.

          assert len(DummyModuleBuilder.TAGGED_COMPONENTS) == 2
@@ -1189,20 +1271,27 @@ 

          builder = mock.MagicMock()

          builder.recover_orphaned_artifact.return_value = []

  

-         further_work = module_build_service.utils.start_next_batch_build(

+         module_build_service.utils.start_next_batch_build(

              conf, module_build, builder)

  

          # Batch number should increase.

          assert module_build.batch == 2

  

          # Make sure we only have one message returned for the one reused component

-         assert len(further_work) == 1

+         assert len(events.scheduler.queue) == 1

          # The KojiBuildChange message in further_work should have build_new_state

          # set to COMPLETE, but the current component build state in the DB should be set

          # to BUILDING, so KojiBuildChange message handler handles the change

          # properly.

-         assert further_work[0].build_new_state == koji.BUILD_STATES["COMPLETE"]

-         component_build = models.ComponentBuild.from_component_event(db_session, further_work[0])

+         event_info = events.scheduler.queue[0][3]

+         assert event_info == ('reuse_component: fake msg', None, 90276227, 1, 'perl-Tangerine',

+                               '0.23', '1.module+0+d027b723', 3,

+                               'Reused component from previous module build')

+         component_build = models.ComponentBuild.from_component_event(

+             db_session,

+             task_id=event_info[2],

+             module_id=event_info[7],

+         )

          assert component_build.state == koji.BUILD_STATES["BUILDING"]

          assert component_build.package == "perl-Tangerine"

          assert component_build.reused_component_id is not None
@@ -1230,13 +1319,13 @@ 

  

          builder = mock.MagicMock()

          builder.recover_orphaned_artifact.return_value = []

-         further_work = module_build_service.utils.start_next_batch_build(

+         module_build_service.utils.start_next_batch_build(

              conf, module_build, builder)

  

          # Batch number should increase.

          assert module_build.batch == 2

          # No component reuse messages should be returned

-         assert len(further_work) == 0

+         assert len(events.scheduler.queue) == 0

          # Make sure that both components in the batch were submitted

          assert len(mock_sbc.mock_calls) == 2

  
@@ -1277,20 +1366,27 @@ 

  

          builder = mock.MagicMock()

          builder.recover_orphaned_artifact.return_value = []

-         further_work = module_build_service.utils.start_next_batch_build(

+         module_build_service.utils.start_next_batch_build(

              conf, module_build, builder)

  

          # Batch number should increase

          assert module_build.batch == 2

  

          # Make sure we only have one message returned for the one reused component

-         assert len(further_work) == 1

-         # The KojiBuildChange message in further_work should have build_new_state

-         # set to COMPLETE, but the current component build state in the DB should be set

-         # to BUILDING, so KojiBuildChange message handler handles the change

-         # properly.

-         assert further_work[0].build_new_state == koji.BUILD_STATES["COMPLETE"]

-         component_build = models.ComponentBuild.from_component_event(db_session, further_work[0])

+         assert len(events.scheduler.queue) == 1

+         # The buildsys.build.state.change event on the scheduler queue should

+         # have build_new_state set to COMPLETE, but the current component build

+         # state in the DB should be set to BUILDING, so the build state change

+         # handler handles the change properly.

+         event_info = events.scheduler.queue[0][3]

+         assert event_info == ('reuse_component: fake msg', None, 90276227, 1,

+                               'perl-Tangerine', '0.23', '1.module+0+d027b723', 3,

+                               'Reused component from previous module build')

+         component_build = models.ComponentBuild.from_component_event(

+             db_session,

+             task_id=event_info[2],

+             module_id=event_info[7],

+         )

          assert component_build.state == koji.BUILD_STATES["BUILDING"]

          assert component_build.package == "perl-Tangerine"

          assert component_build.reused_component_id is not None
@@ -1306,15 +1402,24 @@ 

              db_session, "perl-Tangerine", 3)

          pt_component.state = koji.BUILD_STATES["COMPLETE"]

  

+         events.scheduler.reset()

+ 

          # Start the next build batch

-         further_work = module_build_service.utils.start_next_batch_build(

+         module_build_service.utils.start_next_batch_build(

              conf, module_build, builder)

          # Batch number should increase

          assert module_build.batch == 3

          # Verify that tangerine was reused even though perl-Tangerine was rebuilt in the previous

          # batch

-         assert further_work[0].build_new_state == koji.BUILD_STATES["COMPLETE"]

-         component_build = models.ComponentBuild.from_component_event(db_session, further_work[0])

+         event_info = events.scheduler.queue[0][3]

+         assert event_info == ('reuse_component: fake msg', None, 90276315, 1, 'tangerine', '0.22',

+                               '3.module+0+d027b723', 3,

+                               'Reused component from previous module build')

+         component_build = models.ComponentBuild.from_component_event(

+             db_session,

+             task_id=event_info[2],

+             module_id=event_info[7],

+         )

          assert component_build.state == koji.BUILD_STATES["BUILDING"]

          assert component_build.package == "tangerine"

          assert component_build.reused_component_id is not None
@@ -1343,14 +1448,14 @@ 

  

          builder = mock.MagicMock()

          builder.recover_orphaned_artifact.return_value = []

-         further_work = module_build_service.utils.start_next_batch_build(

+         module_build_service.utils.start_next_batch_build(

              conf, module_build, builder)

  

          # Batch number should increase.

          assert module_build.batch == 2

  

          # Make sure we don't have any messages returned since no components should be reused

-         assert len(further_work) == 0

+         assert len(events.scheduler.queue) == 0

          # Make sure both components are set to the build state but not reused

          assert pt_component.state == koji.BUILD_STATES["BUILDING"]

          assert pt_component.reused_component_id is None
@@ -1379,7 +1484,7 @@ 

          db_session.commit()

  

          builder = mock.MagicMock()

-         further_work = module_build_service.utils.start_next_batch_build(

+         module_build_service.utils.start_next_batch_build(

              conf, module_build, builder)

  

          # Batch number should not increase.
@@ -1387,7 +1492,8 @@ 

          # Make sure start build was called for the second component which wasn't reused

          mock_sbc.assert_called_once()

          # No further work should be returned

-         assert len(further_work) == 0

+ 

+         assert len(events.scheduler.queue) == 0

  

      def test_start_next_batch_build_repo_building(self, default_buildroot_groups):

          """
@@ -1416,9 +1522,11 @@ 

  class TestLocalBuilds:

      def setup_method(self):

          clean_database()

+         events.scheduler.reset()

  

      def teardown_method(self):

          clean_database()

+         events.scheduler.reset()

  

      def test_load_local_builds_name(self, conf_system, conf_resultsdir):

          module_build_service.utils.load_local_builds("testmodule")
@@ -1647,7 +1755,7 @@ 

          "module_build_service.config.Config.allow_only_compatible_base_modules",

          new_callable=mock.PropertyMock,

      )

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      @patch(

          "module_build_service.config.Config.resolver",

          new_callable=mock.PropertyMock, return_value="koji"

file modified
+19 -11
@@ -26,7 +26,6 @@ 

  from module_build_service.models import ModuleBuild, BUILD_STATES, ComponentBuild

  from module_build_service import version

  import module_build_service.config as mbs_config

- import module_build_service.scheduler.handlers.modules

  import module_build_service.utils.submit

  from module_build_service.utils.general import (

      import_mmd, mmd_to_str, load_mmd,
@@ -450,7 +449,7 @@ 

                      assert item[key] == part

  

      @pytest.mark.usefixtures("reuse_component_init_data")

-     @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

+     @patch("koji.ClientSession")

      def test_query_builds_with_binary_rpm(self, ClientSession):

          """

          Test for querying MBS with the binary rpm filename. MBS should return all the modules,
@@ -1253,18 +1252,25 @@ 

          assert data["state"] == 4

          assert data["state_reason"] == "Canceled by some_other_user."

  

+     @pytest.mark.parametrize("module_state", (BUILD_STATES["failed"], BUILD_STATES["ready"]))

      @patch("module_build_service.auth.get_user", return_value=other_user)

-     def test_cancel_build_already_failed(self, mocked_get_user):

+     def test_cancel_build_in_invalid_state(self, mocked_get_user, module_state):

          module = ModuleBuild.get_by_id(db_session, 7)

-         module.state = 4

+         module.state = module_state

          db_session.commit()

  

          rv = self.client.patch(

              "/module-build-service/1/module-builds/7", data=json.dumps({"state": "failed"}))

  

-         data = json.loads(rv.data)

-         assert data["status"] == 403

-         assert data["error"] == "Forbidden"

+         assert rv.status_code == 400

+         assert rv.json == {

+             "error": "Bad Request",

+             "message": (

+                 "To cancel a module build, it must be in one of the following states: "

+                 "build, init, wait"

+             ),

+             "status": 400,

+         }

  

      @patch("module_build_service.auth.get_user", return_value=("sammy", set()))

      def test_cancel_build_unauthorized_no_groups(self, mocked_get_user):
@@ -1328,11 +1334,13 @@ 

      def test_cancel_build_wrong_state(self, mocked_get_user):

          rv = self.client.patch(

              "/module-build-service/1/module-builds/7", data=json.dumps({"state": "some_state"}))

-         data = json.loads(rv.data)

  

-         assert data["status"] == 400

-         assert data["error"] == "Bad Request"

-         assert data["message"] == "The provided state change is not supported"

+         assert rv.status_code == 400

+         assert rv.json == {

+             "error": "Bad Request",

+             "message": "An invalid state was submitted. Valid states values are: failed, 4",

+             "status": 400,

+         }

  

      @patch("module_build_service.auth.get_user", return_value=user)

      def test_submit_build_unsupported_scm_scheme(self, mocked_get_user):
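
For reference, the cancel flow exercised by the tests above corresponds to a PATCH request like the following (the host is illustrative and authentication is omitted; a real MBS instance authenticates the user before allowing the change):

    curl -X PATCH \
        -d '{"state": "failed"}' \
        https://mbs.example.com/module-build-service/1/module-builds/7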

file modified
+26 -12
@@ -4,17 +4,12 @@ 

  # and then run "tox" from this directory.

  

  [tox]

- envlist = flake8, py27, py3

+ envlist = flake8, intflake, py27, py3

  

  [flake8]

  ignore = E731,W503

  max-line-length = 100

- exclude =

-     ./.tox

-     ./.git

-     ./module_build_service/migrations

-     ./build

-     ./.env

+ exclude = .tox,.git,module_build_service/migrations,build,.env

  

  [testenv]

  usedevelop = true
@@ -23,7 +18,7 @@ 

  passenv = DATABASE_URI

  whitelist_externals =

      flake8

-     py.test-3

+     py.test

  deps = -r{toxinidir}/test-requirements.txt

  commands =

      py.test -v \
@@ -49,7 +44,16 @@ 

  basepython = python2

  skip_install = true

  deps = flake8

- commands = flake8

+ # doing this until --extend-exclude support becomes available

+ # https://flake8.readthedocs.io/en/latest/user/options.html#cmdoption-flake8-extend-exclude

+ commands = flake8 --exclude={[flake8]exclude},tests/integration

+ 

+ [testenv:intflake]

+ basepython = python3

+ skip_install = true

+ sitepackages = false

+ deps = flake8

+ commands = flake8 tests/integration

  

  [testenv:bandit]

  basepython = python2
@@ -66,11 +70,21 @@ 

  sitepackages = false

  # let's handle integration test deps separately

  deps =

+     kobo

+     koji

      pytest

-     requests

+     pytest-html

+     pytest-xdist

      PyYAML

+     requests

+     sh

  # Set this to /etc/pki/tls/certs/ca-bundle.crt, for example,

  # if the instance tested has a self-signed certificate.

- passenv = REQUESTS_CA_BUNDLE MBS_TEST_CONFIG

+ passenv = REQUESTS_CA_BUNDLE MBS_TEST_CONFIG MBS_TEST_WORKERS HOME

  commands =

-     pytest -vv --confcutdir=tests/integration {posargs:tests/integration}

+     pytest -rA -vv \

+         --confcutdir=tests/integration \

+         -n {env:MBS_TEST_WORKERS:0} \

+         --html=report.html \

+         --self-contained-html \

+         {posargs:tests/integration}
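
With these tox.ini changes, the new lint target and the parallel integration run can be exercised locally. A minimal sketch of the invocations (the "integration" env name is an assumption, since its section header falls outside this hunk):

    # Lint only the integration tests (new intflake env)
    tox -e intflake

    # Run the integration suite on 4 pytest-xdist workers; -n falls back to 0
    # (no parallelism) when MBS_TEST_WORKERS is unset
    MBS_TEST_WORKERS=4 tox -e integration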

no initial comment

@qwan feel free to rebase this locally and push to the v3 branch if the tests pass. It's too difficult to review since I don't know what you did to resolve the conflicts.
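
For reference, a minimal sketch of that workflow (the remote name and the branch being rebased onto are assumptions):

    git fetch origin
    git checkout rebase-v3
    git rebase origin/master    # resolve conflicts as they come up
    tox                         # confirm the tests still pass
    git push --force origin rebase-v3:v3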

Build #691 failed (commit: 6d6ce0d).
Rebase or make new commits to rebuild.

@mprahl , tests passed locally. The most interesting part of the conflicts comes from commit [fdf8733] in module_build_service/scheduler/producer.py: git doesn't produce a reasonable conflict hint, but after resolving the conflict, the latest version of producer.py has the same content as the current v3 branch, so there should be no problem with it.
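
That claim can be checked mechanically; for example (assuming the upstream remote is named origin):

    # An empty diff confirms the rebased file matches the current v3 branch
    git fetch origin
    git diff origin/v3 -- module_build_service/scheduler/producer.py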

I'll push this tomorrow if @cqi has no objection.

Same feeling here. This is too big to review every change. Just push to v3 and then fix any issues that come up.

Thanks all, I've force-pushed the rebased v3 branch to the upstream repo; you will need to check out the new v3 branch due to the rebase.
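
For an existing clone, picking up the force-pushed branch looks roughly like this (assumes the upstream remote is origin; note this discards any local commits on v3):

    git fetch origin
    git checkout v3
    git reset --hard origin/v3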

Pull-Request has been closed by qwan

4 years ago
Changes Summary (77 files)

+1 -0      file changed  .gitignore
+24 -73    file changed  Vagrantfile
+23 -35    file changed  conf/config.py
+6 -1      file changed  contrib/run-unittests.sh
+2 -0      file changed  docker/Dockerfile-tests
+2 -0      file changed  docker/Dockerfile-tests-py3
+1 -1      file changed  docker/test-py3.sh
+12 -0     file changed  docs/CHANGELOG.rst
+1 -0      file changed  docs/CONTRIBUTING.rst
+13 -0     file changed  module_build_service/__init__.py
+3 -2      file changed  module_build_service/builder/KojiContentGenerator.py
+28 -30    file changed  module_build_service/builder/KojiModuleBuilder.py
+25 -20    file changed  module_build_service/builder/MockModuleBuilder.py
-100       file removed  module_build_service/builder/koji_backports.py
+8 -0      file changed  module_build_service/builder/utils.py
+14 -2     file changed  module_build_service/config.py
+4 -0      file changed  module_build_service/errors.py
+5 -6      file changed  module_build_service/manage.py
+13 -275   file changed  module_build_service/messaging.py
+13 -44    file changed  module_build_service/models.py
+0 -84     file changed  module_build_service/scheduler/__init__.py
+96 -100   file changed  module_build_service/scheduler/consumer.py
+1 -3      file changed  module_build_service/scheduler/default_modules.py
+87        file added    module_build_service/scheduler/events.py
+57 -49    file changed  module_build_service/scheduler/handlers/components.py
+21 -17    file changed  module_build_service/scheduler/handlers/greenwave.py
+70 -52    file changed  module_build_service/scheduler/handlers/modules.py
+25 -24    file changed  module_build_service/scheduler/handlers/repos.py
+28 -23    file changed  module_build_service/scheduler/handlers/tags.py
+65        file added    module_build_service/scheduler/local.py
+130       file added    module_build_service/scheduler/parser.py
+420 -435  file changed  module_build_service/scheduler/producer.py
+8 -14     file changed  module_build_service/utils/batches.py
+3 -3      file changed  module_build_service/utils/general.py
+40 -43    file changed  module_build_service/utils/reuse.py
+2 -2      file changed  module_build_service/utils/submit.py
+21 -8     file changed  module_build_service/views.py
+0 -2      file changed  openshift/integration/koji/pipelines/jobs/mbs-prod-integration-test.env
+0 -2      file changed  openshift/integration/koji/pipelines/jobs/mbs-stage-integration-test.env
+52 -79    file changed  openshift/integration/koji/pipelines/templates/mbs-build.Jenkinsfile
+5 -22     file changed  openshift/integration/koji/pipelines/templates/mbs-integration-test-template.yaml
+30 -182   file changed  openshift/integration/koji/pipelines/templates/mbs-integration-test.Jenkinsfile
+7 -15     file changed  openshift/integration/koji/pipelines/templates/mbs-polling-pagure.yaml
+14 -9     file changed  openshift/integration/koji/pipelines/tests/module-build-br-virtual-stream.groovy
+15 -10    file changed  openshift/integration/koji/pipelines/tests/module-build-cgimport.groovy
+13 -8     file changed  openshift/integration/koji/pipelines/tests/module-build-init.groovy
+6 -4      file changed  requirements.txt
+1 -2      file changed  setup.py
+1 -0      file changed  tests/__init__.py
+12 -2     file changed  tests/integration/README.rst
+42 -13    file changed  tests/integration/conftest.py
+56 -2     file changed  tests/integration/example.test.env.yaml
+33        file added    tests/integration/test_failed_build.py
+42 -3     file changed  tests/integration/test_normal_build.py
+33        file added    tests/integration/test_resume_cancelled_build.py
+36        file added    tests/integration/test_reuse_all_components.py
+55        file added    tests/integration/test_reuse_components.py
+33        file added    tests/integration/test_scratch_build.py
+360 -9    file changed  tests/integration/utils.py
+141 -60   file changed  tests/test_build/test_build.py
+74 -51    file changed  tests/test_builder/test_koji.py
+12 -13    file changed  tests/test_content_generator.py
+6 -6      file changed  tests/test_manage.py
+13 -9     file changed  tests/test_messaging.py
+6 -6      file changed  tests/test_resolver/test_koji.py
+10 -10    file changed  tests/test_scheduler/test_consumer.py
+5 -5      file changed  tests/test_scheduler/test_default_modules.py
+30 -13    file changed  tests/test_scheduler/test_greenwave.py
+6 -14     file changed  tests/test_scheduler/test_module_init.py
+15 -20    file changed  tests/test_scheduler/test_module_wait.py
+89 -220   file changed  tests/test_scheduler/test_poller.py
+23 -23    file changed  tests/test_scheduler/test_repo_done.py
+114 -161  file changed  tests/test_scheduler/test_tag_tagged.py
+3 -3      file changed  tests/test_utils/test_ursine.py
+149 -41   file changed  tests/test_utils/test_utils.py
+19 -11    file changed  tests/test_views/test_views.py
+26 -12    file changed  tox.ini