Package backend :: Package daemons :: Module job_grab
[hide private]
[frames] | [no frames]

Source Code for Module backend.daemons.job_grab

  1  # coding: utf-8 
  2   
  3  from __future__ import print_function 
  4  from __future__ import unicode_literals 
  5  from __future__ import division 
  6  from __future__ import absolute_import 
  7  from collections import defaultdict 
  8   
  9  from multiprocessing import Process 
 10  import time 
 11  from setproctitle import setproctitle 
 12   
 13  from requests import get, RequestException 
 14  from retask.task import Task 
 15  from retask.queue import Queue 
 16   
 17  from ..actions import Action 
 18  from ..exceptions import CoprJobGrabError 
 19  from ..frontend import FrontendClient 
 20   
 21   
 22  # TODO: Replace entire model with asynchronous queue, so that frontend push task, 
 23  # and workers listen for them 
class CoprJobGrab(Process):

    """
    Fetch jobs from the Frontend

    - submit build task to the jobs queue for workers
    - run Action handler for action tasks

    :param Bunch opts: backend config
    :param events: :py:class:`multiprocessing.Queue` to listen
        for events from other backend components
    :param lock: :py:class:`multiprocessing.Lock` global backend lock
    """

    def __init__(self, opts, events, lock):
        # base class initialization
        Process.__init__(self, name="jobgrab")

        self.opts = opts
        self.events = events
        # arch name -> retask Queue; several archs of one builder group
        # share the same queue object (see connect_queues)
        self.task_queues_by_arch = {}

        # task_ids already routed to a builder queue, kept so the same
        # frontend task is not enqueued twice across polling cycles.
        # TODO: produces memory leak! (never pruned for daemon lifetime)
        self.added_jobs = set()
        self.lock = lock

    def connect_queues(self):
        """
        Connects to the retask queues. One queue per builders group.
        """
        for group in self.opts.build_groups:
            queue = Queue("copr-be-{0}".format(group["id"]))
            queue.connect()

            # every architecture served by this group maps to the group queue
            for arch in group["archs"]:
                self.task_queues_by_arch[arch] = queue

    def event(self, what):
        """
        Put new event into the event queue

        :param what: message to put into the queue
        """
        self.events.put({"when": time.time(), "who": "jobgrab", "what": what})

    def process_build_task(self, task):
        """
        Route build task to the appropriate queue.

        :param task: dict-like object which represent build task

        Utilized **task** keys:

        - ``task_id``
        - ``chroot``

        :return int: Count of the successfully routed tasks
        :raises CoprJobGrabError: when the task cannot be routed —
            missing ``chroot`` field, or no builder group serves the
            task's architecture
        """
        count = 0
        if "task_id" not in task:
            self.event("Task missing field `task_id`, raw task: {}".format(task))
            return count

        if task["task_id"] in self.added_jobs:
            # already sent to a builder in an earlier polling cycle
            return count

        # A missing `chroot` used to raise a bare KeyError which escaped
        # load_tasks() (it only catches CoprJobGrabError) and killed the
        # daemon loop; report it through the handled exception type instead.
        if "chroot" not in task:
            raise CoprJobGrabError("Task missing field `chroot`, task: {}"
                                   .format(task))

        # chroot is "<os>-<version>-<arch>"; the third component is the arch
        arch = task["chroot"].split("-")[2]
        if arch not in self.task_queues_by_arch:
            raise CoprJobGrabError("No builder group for architecture: {}, task: {}"
                                   .format(arch, task))

        self.task_queues_by_arch[arch].enqueue(Task(task))

        # Mark the task as sent only AFTER a successful enqueue.  The original
        # code added it first, so a task that failed validation or enqueueing
        # was permanently remembered as "sent" and the build silently lost.
        # TODO: produces memory leak!
        self.added_jobs.add(task["task_id"])
        count += 1
        return count

    def process_action(self, action):
        """
        Run action task handler, see :py:class:`~backend.action.Action`

        :param action: dict-like object with action task
        """
        ao = Action(self.events, action, self.lock, destdir=self.opts.destdir,
                    frontend_callback=FrontendClient(self.opts, self.events),
                    front_url=self.opts.frontend_base_url,
                    results_root_url=self.opts.results_baseurl)
        ao.run()

    def load_tasks(self):
        """
        Retrieve tasks from frontend and runs appropriate handlers
        """
        try:
            r = get("{0}/waiting/".format(self.opts.frontend_url),
                    auth=("user", self.opts.frontend_auth))
        except RequestException as e:
            # frontend unreachable: log and wait for the next polling cycle
            self.event("Error retrieving jobs from {0}: {1}"
                       .format(self.opts.frontend_url, e))
            return

        try:
            r_json = r.json()
        except ValueError as e:
            self.event("Error getting JSON build list from FE {0}".format(e))
            return

        if r_json.get("builds"):
            self.event("{0} jobs returned".format(len(r_json["builds"])))
            count = 0
            for task in r_json["builds"]:
                try:
                    count += self.process_build_task(task)
                except CoprJobGrabError as err:
                    self.event("Failed to enqueue new job: {} with error: {}"
                               .format(task, err))

            if count:
                self.event("New jobs: %s" % count)

        if r_json.get("actions"):
            self.event("{0} actions returned".format(len(r_json["actions"])))

            for action in r_json["actions"]:
                try:
                    self.process_action(action)
                except Exception as error:
                    # actions are best-effort: one failing action must not
                    # prevent the remaining ones from running
                    self.event("Error during processing action `{}`: {}"
                               .format(action, error))

    def run(self):
        """
        Starts job grabber process
        """
        setproctitle("CoprJobGrab")
        self.connect_queues()
        try:
            while True:
                self.load_tasks()
                time.sleep(self.opts.sleeptime)
        except KeyboardInterrupt:
            return