#2139 backend: speedup the background-process spawner
Merged 2 years ago by praiskup. Opened 2 years ago by praiskup.

@@ -5,7 +5,6 @@
  import time
  import traceback
  import base64
- import subprocess
  
  from distutils.dir_util import copy_tree
  from distutils.errors import DistutilsFileError
@@ -577,7 +576,7 @@
              '--worker-id', worker_id,
          ]
          # TODO: mark as started on FE, and let user know in UI
-         subprocess.check_call(command)
+         self.start_daemon_on_background(command)
  
      def finish_task(self, worker_id, task_info):
          task_id = self.get_task_id_from_worker_id(worker_id)
@@ -2,8 +2,6 @@
  Abstraction for RPM and SRPM builds on backend.
  """
  
- import subprocess
- 
  from copr_backend.helpers import get_chroot_arch
  from copr_backend.worker_manager import (
      PredicateWorkerLimit,
@@ -163,7 +161,7 @@
              "--worker-id", worker_id,
          ]
          self.log.info("running worker: %s", " ".join(command))
-         subprocess.check_call(command)
+         self.start_daemon_on_background(command)
  
      def finish_task(self, worker_id, task_info):
          self.get_task_id_from_worker_id(worker_id)
@@ -2,8 +2,8 @@
  import time
  from heapq import heappop, heappush
  import itertools
- import redis
  import logging
+ import subprocess
  
  
  class WorkerLimit:
@@ -446,6 +446,7 @@
  
              self._start_worker(task, now)
  
+         self.log.debug("Reaped %s processes", self._clean_daemon_processes())
          self.log.debug("Worker.run() stop at time %s", time.time())
  
      def _start_worker(self, task, time_now):
@@ -515,3 +516,36 @@
                  # The worker could finish in the meantime, make sure we
                  # hgetall() once more.
                  self.redis.hset(worker_id, 'delete', 1)
+ 
+     def start_daemon_on_background(self, command, env=None):
+         """
+         The daemon.DaemonContext() is a pretty slow thing; it may take up to
+         1s to finish and return the exit status to the parent process.  But if
+         the calling logic is properly prepared for potential startup failures,
+         we don't have to wait at all and can start the background process in
+         the background, too.  This is the typical work-around for starting the
+         'copr-backend-process-*' scripts that are based on the
+         BackgroundWorker.process() logic.
+         """
+         # pylint: disable=consider-using-with
+         process = subprocess.Popen(command, env=env)
+         self.log.debug("background pid=%s started (%s)", process.pid, command)
+ 
+     def _clean_daemon_processes(self):
+         """
+         Wait for all the finished subprocesses to avoid leftover zombies.
+         Return the number of successfully waited-for processes.  Complements
+         start_daemon_on_background() above, but is called automatically.
+         """
+         counter = 0
+         try:
+             # Wait for any background process (pid=-1); no additional
+             # options are needed (options=0).  All the background processes
+             # should quit relatively fast (within one second).
+             while True:
+                 (pid, _) = os.waitpid(-1, 0)
+                 self.log.debug("Worker Manager waited for pid=%s", pid)
+                 counter += 1
+         except ChildProcessError:
+             pass
+         return counter

file modified
+2 -2
@@ -136,7 +136,7 @@
      @mock.patch("copr_backend.actions.copy_tree")
      @mock.patch("copr_backend.actions.os.path.exists")
      @mock.patch("copr_backend.actions.unsign_rpms_in_dir")
-     @mock.patch("copr_backend.actions.subprocess.Popen")
+     @mock.patch("copr_backend.helpers.subprocess.Popen")
      def test_action_handle_forks(self, mc_popen, mc_unsign_rpms_in_dir,
                                   mc_exists, mc_copy_tree, mc_time):
          mc_popen.return_value.communicate.return_value = ("", "")
@@ -747,7 +747,7 @@
      # We want to test that ACR flag doesn't make any difference here, explicit
      # createrepo always works with non-devel directory.
      @pytest.mark.parametrize('devel', [False, True])
-     @mock.patch("copr_backend.actions.subprocess.Popen")
+     @mock.patch("copr_backend.helpers.subprocess.Popen")
      @mock.patch("copr_backend.actions.uses_devel_repo")
      def test_handle_createrepo_ok(self, mc_devel, mc_sp_popen, mc_time, devel):
          mc_sp_popen.return_value.communicate.return_value = ("", "")
@@ -5,7 +5,6 @@
  import copy
  import time
  import logging
- import subprocess
  from unittest.mock import MagicMock, patch
  
  import pytest
@@ -28,11 +27,15 @@
  log = logging.getLogger()
  log.setLevel(logging.DEBUG)
  
+ # pylint: disable=too-many-instance-attributes,protected-access
+ 
  
  class ToyWorkerManager(WorkerManager):
-     # pylint: disable=abstract-method,too-many-instance-attributes
+     # pylint: disable=abstract-method
      process_counter = 0
      task_sleep = 0
+     started_in_cycle = 0
+     expected_terminations_in_cycle = None
  
      def start_task(self, worker_id, task):
          self.process_counter += 1
@@ -51,7 +54,30 @@
          if task_env:
             environ.update(task_env)
  
-         subprocess.check_call(list(map(str, cmd)), env=environ)
+         start = time.time()
+         #subprocess.check_call(list(map(str, cmd)), env=environ)
+         retyped_cmd = list(map(str, cmd))
+         self.start_daemon_on_background(retyped_cmd, env=environ)
+         self.log.debug("starting-on-background-took %s (%s)",
+                        time.time() - start, retyped_cmd)
+         self.started_in_cycle += 1
+ 
+     def _clean_daemon_processes(self):
+         """
+         Check that we are not leaving any zombies behind us
+         """
+         waited = super()._clean_daemon_processes()
+         self.log.debug("cleaned up %s, started %s", waited, self.started_in_cycle)
+         if waited != self.started_in_cycle:
+             if self.expected_terminations_in_cycle is not None:
+                 assert self.expected_terminations_in_cycle == waited
+                 return waited
+             assert False
+         return waited
+ 
+     def run(self, *args, **kwargs):
+         self.started_in_cycle = 0
+         return super().run(*args, **kwargs)
  
      def finish_task(self, _w_id, _tinfo):
          pass
@@ -232,6 +258,7 @@
          self.worker_manager._start_worker(task, time.time())
          worker_id = self.worker_manager.get_worker_id(repr(task))
          assert len(self.redis.keys(worker_id)) == 1
+         self.worker_manager._clean_daemon_processes()
  
      def test_number_of_tasks(self):
          assert self.remaining_tasks() == 10
@@ -378,6 +405,12 @@
          assert self.w0 not in keys
  
      def test_all_passed(self, caplog):
+         # Popen() is a lot of fun.  It seems to have some zombie-reaping
+         # mechanism: once the calling function's objects are destroyed
+         # (including the Popen() return value reference), a future call to
+         # Popen() seems to just reap the old Popen() processes.
+         self.worker_manager.expected_terminations_in_cycle = 5
+ 
          self.worker_manager.run(timeout=100)
          for i in range(0, 10):
              smsg = "Starting worker {}{}, task.priority=0"

The process handling in WorkerManager should be robust enough that we can
avoid checking the BackgroundWorker.process() exit status. We never provided
any useful exit status value from there anyway (except for potential
daemon.DaemonContext() failures); all the application-logic info is
propagated through Redis.

So instead of waiting for the exit value in subprocess.check_call(), we do
the "startup" of the BackgroundWorker process in the background too, using
just Popen(). If the daemonizing logic ever fails, the task is retried in
the next cycle(s) anyway, because the Redis "started" field will never get
set.
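
For illustration, here is a condensed, self-contained sketch of this
fire-and-forget pattern (the method bodies mirror the diff above; the driver
loop and the "sleep 0.1" command are made up for the demo):

    import logging
    import os
    import subprocess
    import time

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger("spawner-sketch")

    def start_daemon_on_background(command, env=None):
        """Fire-and-forget: don't wait for the daemonizing wrapper to exit."""
        # pylint: disable=consider-using-with
        process = subprocess.Popen(command, env=env)
        log.debug("background pid=%s started (%s)", process.pid, command)

    def clean_daemon_processes():
        """Reap the already-finished children; return how many were reaped."""
        counter = 0
        try:
            while True:
                pid, _status = os.waitpid(-1, 0)  # blocks until a child exits
                log.debug("waited for pid=%s", pid)
                counter += 1
        except ChildProcessError:
            pass  # no children are left
        return counter

    if __name__ == "__main__":
        for _ in range(5):
            start_daemon_on_background(["sleep", "0.1"])
        time.sleep(0.5)  # stands in for the dispatcher's sleep between cycles
        print("reaped", clean_daemon_processes(), "processes")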

This change might make a fork-bomb look much easier to trigger, but we have
all the WorkerLimit machinery in place and we never start more
BackgroundWorker processes than allowed. We will, however, have more
concurrency during process startup, so the overall average startup time per
process might grow, and the code might be less reliable on very, very slow
backend machines (see the worker_timeout_start option, which still defaults
to 30s).

The speedup measured on my (otherwise idling) i7-10850H box was from 0.37s
to 0.00065s, IOW more than 500x faster. This means that within the 20s
sleep-time window we should be able to start several thousand processes
(which should really be enough to feed all the allocated workers).
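
A rough way to reproduce the effect yourself (not the original measurement;
"/bin/true" stands in here for the daemonizing copr-backend-process-*
wrapper, so the absolute numbers will be smaller):

    import subprocess
    import time

    N = 50

    start = time.monotonic()
    for _ in range(N):
        subprocess.check_call(["/bin/true"])  # waits for every child to exit
    blocking = (time.monotonic() - start) / N

    start = time.monotonic()
    procs = [subprocess.Popen(["/bin/true"]) for _ in range(N)]  # no waiting
    fire_and_forget = (time.monotonic() - start) / N
    for proc in procs:
        proc.wait()  # reap afterwards, as _clean_daemon_processes() does

    print(f"check_call {blocking:.6f}s/start, Popen {fire_and_forget:.6f}s/start")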

Relates: #2095

Build failed. More information on how to proceed and troubleshoot errors available at https://fedoraproject.org/wiki/Zuul-based-ci

rebased onto 3981dc2714a2359656209e8105f752f7c382be48

2 years ago

Build succeeded.

rebased onto 001132514248ee5bc1f4e3b00bb09fd7f8e33ed9

2 years ago

Build succeeded.

See the difference. Tested in https://copr.stg.fedoraproject.org/coprs/praiskup/ping/.

Before applying the patch:
- each worker start takes more than two seconds (even more than I originally expected)
- the tasks are started in two rounds; see the [2022-03-25 11:18:04,467] entry about fetching the tasks again

[2022-03-25 11:16:21,415][  INFO][PID:587212][backend.build_dispatcher][dispatcher.py:run:105] getting builds from frontend
[2022-03-25 11:16:41,435][  INFO][PID:587212][backend.build_dispatcher][dispatcher.py:run:105] getting builds from frontend
[2022-03-25 11:17:01,454][  INFO][PID:587212][backend.build_dispatcher][dispatcher.py:run:105] getting builds from frontend
[2022-03-25 11:17:03,148][  INFO][PID:587212][backend.build_dispatcher][dispatcher.py:_print_added_jobs:83] Got new 'build' tasks: {'2907150'}
[2022-03-25 11:17:03,175][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150, task.priority=1
[2022-03-25 11:17:03,176][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot srpm-builds --worker-id rpm_build_worker:2907150
[2022-03-25 11:17:15,258][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_cleanup_workers:489] Finished worker rpm_build_worker:2907150
[2022-03-25 11:17:21,461][  INFO][PID:587212][backend.build_dispatcher][dispatcher.py:run:105] getting builds from frontend
[2022-03-25 11:17:41,481][  INFO][PID:587212][backend.build_dispatcher][dispatcher.py:run:105] getting builds from frontend
[2022-03-25 11:17:42,898][  INFO][PID:587212][backend.build_dispatcher][dispatcher.py:_print_added_jobs:83] Got new 'build' tasks: {'2907150-mageia-8-x86_64', '2907150-epel-6-x86_64', '2907150-fedora-35-ppc64le', '2907150-epel-7-x86_64', '2907150-centos-stream-8-ppc64le', '2907150-epel-9-aarch64', '2907150-fedora-34-aarch64', '2907150-fedora-34-i386', '2907150-centos-stream-8-x86_64', '2907150-fedora-rawhide-x86_64', '2907150-fedora-36-armhfp', '2907150-fedora-35-s390x', '2907150-fedora-rawhide-ppc64le', '2907150-epel-6-i386', '2907150-fedora-rawhide-s390x', '2907150-opensuse-tumbleweed-x86_64', '2907150-opensuse-leap-15.3-x86_64', '2907150-fedora-36-aarch64', '2907150-opensuse-tumbleweed-aarch64', '2907150-epel-9-s390x', '2907150-centos-stream-9-ppc64le', '2907150-mageia-cauldron-aarch64', '2907150-openmandriva-cooker-i686', '2907150-opensuse-leap-15.2-x86_64', '2907150-fedora-rawhide-i386', '2907150-epel-8-x86_64', '2907150-openmandriva-cooker-aarch64', '2907150-fedora-36-x86_64', '2907150-fedora-36-s390x', '2907150-fedora-34-x86_64', '2907150-fedora-eln-aarch64', '2907150-centos-stream-9-x86_64', '2907150-fedora-34-ppc64le', '2907150-fedora-35-armhfp', '2907150-fedora-36-ppc64le', '2907150-centos-stream-9-aarch64', '2907150-opensuse-leap-15.2-aarch64', '2907150-fedora-eln-i386', '2907150-epel-8-aarch64', '2907150-fedora-eln-x86_64', '2907150-opensuse-tumbleweed-i586', '2907150-fedora-35-x86_64', '2907150-mageia-cauldron-x86_64', '2907150-fedora-rawhide-aarch64', '2907150-fedora-35-aarch64', '2907150-fedora-34-armhfp', '2907150-openmandriva-rolling-i686', '2907150-openmandriva-rolling-x86_64', '2907150-epel-7-ppc64le', '2907150-fedora-36-i386', '2907150-mageia-8-i586', '2907150-fedora-35-i386', '2907150-fedora-eln-s390x', '2907150-fedora-34-s390x', '2907150-epel-9-ppc64le', '2907150-centos-stream-8-aarch64', '2907150-epel-8-ppc64le', '2907150-mageia-cauldron-i586', '2907150-opensuse-tumbleweed-ppc64le', '2907150-epel-8-s390x', '2907150-epel-9-x86_64', '2907150-mageia-8-aarch64'}
[2022-03-25 11:17:42,940][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-fedora-rawhide-x86_64, task.priority=1000001
[2022-03-25 11:17:42,941][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot fedora-rawhide-x86_64 --worker-id rpm_build_worker:2907150-fedora-rawhide-x86_64
[2022-03-25 11:17:43,954][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-fedora-rawhide-aarch64, task.priority=1000001
[2022-03-25 11:17:43,954][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot fedora-rawhide-aarch64 --worker-id rpm_build_worker:2907150-fedora-rawhide-aarch64
[2022-03-25 11:17:45,097][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-fedora-rawhide-ppc64le, task.priority=1000001
[2022-03-25 11:17:45,099][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot fedora-rawhide-ppc64le --worker-id rpm_build_worker:2907150-fedora-rawhide-ppc64le
[2022-03-25 11:17:46,272][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-fedora-rawhide-s390x, task.priority=1000001
[2022-03-25 11:17:46,274][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot fedora-rawhide-s390x --worker-id rpm_build_worker:2907150-fedora-rawhide-s390x
[2022-03-25 11:17:47,343][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-fedora-34-armhfp, task.priority=1000001
[2022-03-25 11:17:47,345][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot fedora-34-armhfp --worker-id rpm_build_worker:2907150-fedora-34-armhfp
[2022-03-25 11:17:49,362][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-fedora-rawhide-i386, task.priority=1000002
[2022-03-25 11:17:49,364][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot fedora-rawhide-i386 --worker-id rpm_build_worker:2907150-fedora-rawhide-i386
[2022-03-25 11:17:52,132][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-opensuse-tumbleweed-ppc64le, task.priority=1000002
[2022-03-25 11:17:52,133][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot opensuse-tumbleweed-ppc64le --worker-id rpm_build_worker:2907150-opensuse-tumbleweed-ppc64le
[2022-03-25 11:17:54,123][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-mageia-cauldron-aarch64, task.priority=1000002
[2022-03-25 11:17:54,123][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot mageia-cauldron-aarch64 --worker-id rpm_build_worker:2907150-mageia-cauldron-aarch64
[2022-03-25 11:17:56,940][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-fedora-34-s390x, task.priority=1000002
[2022-03-25 11:17:56,943][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot fedora-34-s390x --worker-id rpm_build_worker:2907150-fedora-34-s390x
[2022-03-25 11:17:59,677][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-fedora-35-armhfp, task.priority=1000002
[2022-03-25 11:17:59,679][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot fedora-35-armhfp --worker-id rpm_build_worker:2907150-fedora-35-armhfp
[2022-03-25 11:18:01,944][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-opensuse-tumbleweed-i586, task.priority=1000003
[2022-03-25 11:18:01,946][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot opensuse-tumbleweed-i586 --worker-id rpm_build_worker:2907150-opensuse-tumbleweed-i586
[2022-03-25 11:18:04,467][  INFO][PID:587212][backend.build_dispatcher][dispatcher.py:run:105] getting builds from frontend
[2022-03-25 11:18:06,353][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-mageia-8-x86_64, task.priority=1000001
[2022-03-25 11:18:06,354][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot mageia-8-x86_64 --worker-id rpm_build_worker:2907150-mageia-8-x86_64
[2022-03-25 11:18:08,290][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-mageia-8-aarch64, task.priority=1000001
[2022-03-25 11:18:08,291][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot mageia-8-aarch64 --worker-id rpm_build_worker:2907150-mageia-8-aarch64
[2022-03-25 11:18:10,520][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-centos-stream-8-ppc64le, task.priority=1000001
[2022-03-25 11:18:10,521][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot centos-stream-8-ppc64le --worker-id rpm_build_worker:2907150-centos-stream-8-ppc64le
[2022-03-25 11:18:12,832][  INFO][PID:587212][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907150-mageia-8-i586, task.priority=1000002
[2022-03-25 11:18:12,834][  INFO][PID:587212][backend.build_dispatcher][rpm_builds.py:start_task:165] running worker: copr-backend-process-build --daemon --build-id 2907150 --chroot mageia-8-i586 --worker-id rpm_build_worker:2907150-mageia-8-i586

After applying the patch, all tasks started within a single second:

[2022-03-25 11:23:37,707][  INFO][PID:2981038][backend.build_dispatcher][dispatcher.py:run:105] getting builds from frontend
[2022-03-25 11:23:57,725][  INFO][PID:2981038][backend.build_dispatcher][dispatcher.py:run:105] getting builds from frontend
[2022-03-25 11:23:59,442][  INFO][PID:2981038][backend.build_dispatcher][dispatcher.py:_print_added_jobs:83] Got new 'build' tasks: {'2907151-epel-9-ppc64le', '2907151-opensuse-leap-15.3-x86_64', '2907151-fedora-eln-x86_64', '2907151-fedora-36-aarch64', '2907151-epel-6-x86_64', '2907151-centos-stream-9-ppc64le', '2907151-mageia-cauldron-i586', '2907151-openmandriva-cooker-aarch64', '2907151-centos-stream-9-aarch64', '2907151-mageia-cauldron-x86_64', '2907151-fedora-36-armhfp', '2907151-epel-8-s390x', '2907151-fedora-rawhide-ppc64le', '2907151-fedora-36-s390x', '2907151-fedora-rawhide-s390x', '2907151-fedora-rawhide-i386', '2907151-fedora-35-s390x', '2907151-openmandriva-rolling-x86_64', '2907151-opensuse-tumbleweed-aarch64', '2907151-fedora-35-armhfp', '2907151-fedora-36-ppc64le', '2907151-openmandriva-cooker-i686', '2907151-fedora-34-i386', '2907151-opensuse-tumbleweed-i586', '2907151-fedora-rawhide-aarch64', '2907151-epel-8-x86_64', '2907151-fedora-eln-i386', '2907151-epel-8-ppc64le', '2907151-openmandriva-rolling-i686', '2907151-fedora-35-ppc64le', '2907151-centos-stream-8-ppc64le', '2907151-fedora-35-aarch64', '2907151-fedora-34-ppc64le', '2907151-fedora-rawhide-x86_64', '2907151-epel-7-ppc64le', '2907151-mageia-8-i586', '2907151-fedora-34-x86_64', '2907151-centos-stream-8-aarch64', '2907151-mageia-8-aarch64', '2907151-fedora-35-i386', '2907151-fedora-36-i386', '2907151-centos-stream-8-x86_64', '2907151-fedora-eln-aarch64', '2907151-mageia-8-x86_64', '2907151-opensuse-tumbleweed-ppc64le', '2907151-fedora-34-s390x', '2907151-opensuse-leap-15.2-aarch64', '2907151-opensuse-tumbleweed-x86_64', '2907151-centos-stream-9-x86_64', '2907151-fedora-36-x86_64', '2907151-epel-9-s390x', '2907151-fedora-34-aarch64', '2907151-epel-6-i386', '2907151-opensuse-leap-15.2-x86_64', '2907151-mageia-cauldron-aarch64', '2907151-epel-9-x86_64', '2907151-fedora-34-armhfp', '2907151-fedora-eln-s390x', '2907151-fedora-35-x86_64', '2907151-epel-8-aarch64', '2907151-epel-7-x86_64', '2907151-epel-9-aarch64'}
[2022-03-25 11:23:59,476][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-fedora-rawhide-aarch64, task.priority=1000001
[2022-03-25 11:23:59,478][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot fedora-rawhide-aarch64 --worker-id rpm_build_worker:2907151-fedora-rawhide-aarch64
[2022-03-25 11:23:59,480][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-fedora-35-i386, task.priority=1000001
[2022-03-25 11:23:59,481][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot fedora-35-i386 --worker-id rpm_build_worker:2907151-fedora-35-i386
[2022-03-25 11:23:59,485][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-fedora-35-armhfp, task.priority=1000001
[2022-03-25 11:23:59,486][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot fedora-35-armhfp --worker-id rpm_build_worker:2907151-fedora-35-armhfp
[2022-03-25 11:23:59,497][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-fedora-35-s390x, task.priority=1000001
[2022-03-25 11:23:59,497][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot fedora-35-s390x --worker-id rpm_build_worker:2907151-fedora-35-s390x
[2022-03-25 11:23:59,511][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-epel-9-ppc64le, task.priority=1000001
[2022-03-25 11:23:59,511][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot epel-9-ppc64le --worker-id rpm_build_worker:2907151-epel-9-ppc64le
[2022-03-25 11:23:59,526][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-fedora-35-aarch64, task.priority=1000002
[2022-03-25 11:23:59,527][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot fedora-35-aarch64 --worker-id rpm_build_worker:2907151-fedora-35-aarch64
[2022-03-25 11:23:59,546][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-mageia-8-x86_64, task.priority=1000002
[2022-03-25 11:23:59,548][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot mageia-8-x86_64 --worker-id rpm_build_worker:2907151-mageia-8-x86_64
[2022-03-25 11:23:59,556][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-epel-9-s390x, task.priority=1000002
[2022-03-25 11:23:59,557][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot epel-9-s390x --worker-id rpm_build_worker:2907151-epel-9-s390x
[2022-03-25 11:23:59,574][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-centos-stream-9-ppc64le, task.priority=1000002
[2022-03-25 11:23:59,576][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot centos-stream-9-ppc64le --worker-id rpm_build_worker:2907151-centos-stream-9-ppc64le
[2022-03-25 11:23:59,587][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-fedora-36-armhfp, task.priority=1000002
[2022-03-25 11:23:59,588][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot fedora-36-armhfp --worker-id rpm_build_worker:2907151-fedora-36-armhfp
[2022-03-25 11:23:59,599][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-mageia-8-i586, task.priority=1000003
[2022-03-25 11:23:59,600][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot mageia-8-i586 --worker-id rpm_build_worker:2907151-mageia-8-i586
[2022-03-25 11:23:59,618][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-fedora-34-aarch64, task.priority=1000003
[2022-03-25 11:23:59,619][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot fedora-34-aarch64 --worker-id rpm_build_worker:2907151-fedora-34-aarch64
[2022-03-25 11:23:59,649][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-fedora-36-ppc64le, task.priority=1000003
[2022-03-25 11:23:59,650][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot fedora-36-ppc64le --worker-id rpm_build_worker:2907151-fedora-36-ppc64le
[2022-03-25 11:23:59,709][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-fedora-34-armhfp, task.priority=1000003
[2022-03-25 11:23:59,716][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot fedora-34-armhfp --worker-id rpm_build_worker:2907151-fedora-34-armhfp
[2022-03-25 11:23:59,757][  INFO][PID:2981038][backend.build_dispatcher][worker_manager.py:_start_worker:454] Starting worker rpm_build_worker:2907151-epel-9-x86_64, task.priority=1000004
[2022-03-25 11:23:59,760][  INFO][PID:2981038][backend.build_dispatcher][rpm_builds.py:start_task:163] running worker: copr-backend-process-build --daemon --build-id 2907151 --chroot epel-9-x86_64 --worker-id rpm_build_worker:2907151-epel-9-x86_64
[2022-03-25 11:24:19,610][  INFO][PID:2981038][backend.build_dispatcher][dispatcher.py:run:105] getting builds from frontend
[2022-03-25 11:24:41,943][  INFO][PID:2981038][backend.build_dispatcher][dispatcher.py:run:105] getting builds from frontend

You should run os.wait3(os.WNOHANG) somewhere.
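
A sketch of what that suggestion could look like, in contrast to the
blocking os.waitpid(-1, 0) loop merged above (the function name is
hypothetical):

    import os

    def reap_nonblocking():
        """Reap finished children without ever blocking the dispatcher."""
        counter = 0
        while True:
            try:
                # WNOHANG makes wait3() return immediately instead of blocking
                pid, _status, _rusage = os.wait3(os.WNOHANG)
            except ChildProcessError:
                break  # no child processes exist at all
            if pid == 0:
                break  # children exist, but none has finished yet
            counter += 1
        return counter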

rebased onto fe20c76a187172a0a9a62d7c6724693149af7af8

2 years ago

Build succeeded.

rebased onto dd88b378298dc9555d845d005cfce318b6d8a2b0

2 years ago

Build failed. More information on how to proceed and troubleshoot errors available at https://fedoraproject.org/wiki/Zuul-based-ci

rebased onto 61ff0f2

2 years ago

Build succeeded.

[copr-build] (testing if pagure events under non-root user work)

Commit c273ca5 fixes this pull-request

Pull-Request has been merged by praiskup

2 years ago

Commit 5a55d53 fixes this pull-request

Pull-Request has been merged by praiskup

2 years ago