#205 [frontend][distgit][backend] add source_status field for Builds
Merged 6 years ago by clime. Opened 6 years ago by clime.

@@ -23,6 +23,7 @@ 

      RUNNING = 3

      PENDING = 4

      SKIPPED = 5

+     STARTING = 6

  

      @classmethod

      def string(cls, number):

@@ -49,7 +49,7 @@ 

              self.update_process_title("Waiting for an action task from frontend for {}s"

                                        .format(int(time.time() - get_action_init_time)))

              try:

-                 r = get("{0}/backend/waiting-action/".format(self.opts.frontend_base_url),

+                 r = get("{0}/backend/pending-action/".format(self.opts.frontend_base_url),

                          auth=("user", self.opts.frontend_auth))

                  action_task = r.json()

              except (RequestException, ValueError) as error:

@@ -18,16 +18,18 @@ 

  from ..exceptions import DispatchBuildError, NoVmAvailable

  from ..job import BuildJob

  from ..vm_manage.manager import VmManager

+ from ..constants import BuildStatus

  from .worker import Worker

  

  from collections import defaultdict

  

  class BuildDispatcher(multiprocessing.Process):

      """

-     1) Fetch build task from frontend

-     2) Get an available VM for it

-     3) Create a worker for the job

-     4) Start worker asynchronously and go to 1)

+     1) Fetch build tasks from frontend

+     2) Loop through them and try to allocate VM for each

+        - If VM can be allocated, spawn a worker and run it asynchronously

+        - otherwise, check the next build task

+     3) Go to 1

      """

  

      def __init__(self, opts):
@@ -49,7 +51,6 @@ 

      def get_vm_group_ids(self, arch):

          if not arch:

              return [group["id"] for group in self.opts.build_groups]

- 

          try:

              return self.arch_to_groups[arch]

          except KeyError:
@@ -84,7 +85,7 @@ 

              self.update_process_title("Waiting for jobs from frontend for {} s"

                                        .format(int(time.time() - get_task_init_time)))

              try:

-                 tasks = get("{0}/backend/waiting-jobs/".format(self.opts.frontend_base_url),

+                 tasks = get("{0}/backend/pending-jobs/".format(self.opts.frontend_base_url),

                             auth=("user", self.opts.frontend_auth)).json()

  

              except (RequestException, ValueError) as error:
@@ -99,8 +100,8 @@ 

  

      def can_build_start(self, job):

          """

-         Announce to the frontend that the build is going to start so that

-         it can confirm that and draw out another job for building.

+         Announce to the frontend that the build is starting. The frontend

+         may reject the build from starting.

  

          Returns

          -------
@@ -108,7 +109,8 @@ 

          False if the build can not start (build is cancelled)

          """

          try:

-             can_build_start = self.frontend_client.starting_build(job.build_id, job.chroot)

+             job.status = BuildStatus.STARTING

+             can_build_start = self.frontend_client.starting_build(job.to_dict())

          except (RequestException, ValueError) as error:

              self.log.exception("Communication with Frontend to confirm build start failed with error: {}".format(error))

              return False
@@ -126,9 +128,6 @@ 

                  self.log.info("Removed finished worker {} for job {}"

                                .format(worker.worker_id, worker.job.task_id))

  

-     def get_worker_ids(self):

-         return [worker.worker_id for worker in self.workers]

- 

      def start_worker(self, vm, job, reattach=False):

          worker = Worker(

              opts=self.opts,
@@ -154,7 +153,14 @@ 

              self.clean_finished_workers()

  

              for job in self.load_jobs():

-                 # search db builder records for the job and

+                 # first check whether we already have a

+                 # worker running for this job

+                 if any([job.task_id == w.job.task_id for w in self.workers]):

+                     self.log.warning("Skipping already running task '{}'"

+                                      .format(job.task_id))

+                     continue

+ 

+                 # now search db builder records for the job and

                  # if we found it, spawn a worker to reattach

                  vm = self.vm_manager.get_vm_by_task_id(job.task_id)

                  if vm and vm.state == 'in_use':

@@ -129,7 +129,6 @@ 

          return False

  

      def init_buses(self):

- 

          self.log.info(self.opts.msg_buses)

          for bus_config in self.opts.msg_buses:

              self.msg_buses.append(MsgBusStomp(bus_config, self.log))
@@ -231,9 +230,6 @@ 

                      self.log.exception("Unexpected error")

                      failed = True

  

-                 finally:

-                     self.vm_manager.release_vm(self.vm.vm_name)

- 

                  if not failed:

                      try:

                          mr.on_success_build()
@@ -275,9 +271,7 @@ 

          return built_packages

  

      def get_srpm_url(self, job):

-         self.log.info("Retrieving srpm URL for {}"

-                       .format(job.results_dir))

- 

+         self.log.info("Retrieving srpm URL for {}".format(job.results_dir))

          try:

              pattern = os.path.join(job.results_dir, '*.src.rpm')

              srpm_name = os.path.basename(glob.glob(pattern)[0])
@@ -368,3 +362,5 @@ 

              self.log.exception("Building error: {}".format(error))

          except Exception as e:

              self.log.exception("Unexpected error: {}".format(e))

+         finally:

+             self.vm_manager.release_vm(self.vm.vm_name)

file modified
+1 -2
@@ -58,13 +58,12 @@ 

          """

          self._post_to_frontend_repeatedly(data, "update")

  

-     def starting_build(self, build_id, chroot_name):

+     def starting_build(self, data):

          """

          Announce to the frontend that a build is starting.

          Return: True if the build can start

                  False if the build can not start (can be cancelled or deleted)

          """

-         data = {"build_id": build_id, "chroot": chroot_name}

          response = self._post_to_frontend_repeatedly(data, "starting_build")

          if "can_start" not in response.json():

              raise RequestException("Bad respond from the frontend")

@@ -225,6 +225,7 @@ 

                  raise BuilderError("Build host `{}` missing mock config for chroot `{}`"

                                     .format(self.hostname, self.job.chroot))

  

+ 

  class SrpmBuilder(Builder):

      def _copr_builder_cmd(self):

          return 'copr-rpmbuild --verbose --drop-resultdir '\

@@ -111,14 +111,14 @@ 

          for val in [True, False]:

              ptfr.return_value.json.return_value = {"can_start": val}

  

-             assert self.fc.starting_build(self.build_id, self.chroot_name) == val

+             assert self.fc.starting_build(self.data) == val

  

      def test_starting_build_err(self):

          ptfr = MagicMock()

          self.fc._post_to_frontend_repeatedly = ptfr

  

          with pytest.raises(RequestException):

-             self.fc.starting_build(self.build_id, self.chroot_name)

+             self.fc.starting_build(self.data)

  

      def test_starting_build_err_2(self):

          ptfr = MagicMock()
@@ -126,7 +126,7 @@ 

          ptfr.return_value.json.return_value = {}

  

          with pytest.raises(RequestException):

-             self.fc.starting_build(self.build_id, self.chroot_name)

+             self.fc.starting_build(self.data)

  

      def test_reschedule_build(self):

          ptfr = MagicMock()

@@ -6,7 +6,7 @@ 

  

  class ImportTask(object):

      def __init__(self):

-         self.task_id = None

+         self.build_id = None

          self.owner = None

          self.project = None

          self.branches = []
@@ -17,7 +17,7 @@ 

          task = ImportTask()

  

          try:

-             task.task_id = task_dict["task_id"]

+             task.build_id = task_dict["build_id"]

              task.owner = task_dict["owner"]

              task.project = task_dict["project"]

              task.branches = task_dict["branches"]

file modified
+9 -34
@@ -35,7 +35,7 @@ 

              # get the data

              r = get(self.get_url)

              # take the first task

-             builds = filter(lambda x: x["task_id"] not in exclude, r.json()["builds"])

+             builds = filter(lambda x: x["build_id"] not in exclude, r.json())

              if not builds:

                  log.debug("No new tasks to process.")

  
@@ -62,23 +62,6 @@ 

              log.error("Failed to post back to frontend : {}".format(data_dict))

              log.exception(str(e))

  

-     def get_result_dict_for_frontend(self, task_id, branch, result):

-         if not result or not branch in result.branch_commits:

-             return {

-                 "task_id": task_id,

-                 "error": "Could not import this branch.",

-                 "branch": branch,

-             }

- 

-         return {

-             "task_id": task_id,

-             "pkg_name": result.pkg_name,

-             "pkg_version": result.pkg_evr,

-             "repo_name": result.reponame,

-             "git_hash": result.branch_commits[branch],

-             "branch": branch,

-         }

- 

      def do_import(self, task):

          """

          :type task: ImportTask
@@ -86,39 +69,31 @@ 

          per_task_log_handler = self.setup_per_task_logging(task)

          workdir = tempfile.mkdtemp()

  

-         result = None

+         result = { "build_id": task.build_id }

          try:

              srpm_path = helpers.download_file(

                  task.srpm_url,

                  workdir

              )

-             result = import_package(

+             result.update(import_package(

                  self.opts,

                  task.repo_namespace,

                  task.branches,

                  srpm_path

-             )

+             ))

          except PackageImportException as e:

              log.exception("Exception raised during package import.")

          finally:

              shutil.rmtree(workdir)

  

-         log.info("sending a responses for branches {0}".format(', '.join(task.branches)))

-         for branch in task.branches:

-             self.post_back_safe(

-                 self.get_result_dict_for_frontend(task.task_id, branch, result)

-             )

- 

+         log.info("sending a response for task {}".format(result))

+         self.post_back_safe(result)

          self.teardown_per_task_logging(per_task_log_handler)

  

      def setup_per_task_logging(self, task):

-         # Avoid putting logs into subdirectories

-         # when dist git branch name contains slashes.

-         task_id = str(task.task_id).replace('/', '_')

- 

          handler = logging.FileHandler(

              os.path.join(self.opts.per_task_log_dir,

-                          "{0}.log".format(task_id))

+                          "{0}.log".format(task.build_id))

          )

          handler.setLevel(logging.DEBUG)

          logging.getLogger('').addHandler(handler)
@@ -149,8 +124,8 @@ 

                  continue

  

              for mb_task in mb_tasks:

-                 p = worker_cls(target=self.do_import, args=[mb_task], id=mb_task.task_id, timeout=3600)

+                 p = worker_cls(target=self.do_import, args=[mb_task], id=mb_task.build_id, timeout=3600)

                  pool.append(p)

                  log.info("Starting worker '{}' with task '{}' (timeout={})"

-                          .format(p.name, mb_task.task_id, p.timeout))

+                          .format(p.name, mb_task.build_id, p.timeout))

                  p.start()

@@ -38,7 +38,7 @@ 

              log.info("Going to terminate worker '{}' with task '{}' due to exceeded timeout {} seconds"

                       .format(worker.name, worker.id, worker.timeout))

              worker.terminate()

-             callback({"task_id": worker.id, "error": TimeoutException.strtype})

+             callback({"build_id": worker.id, "error": TimeoutException.strtype})

              log.info("Worker '{}' with task '{}' was terminated".format(worker.name, worker.timeout))

  

      def remove_dead(self):

file modified
+2 -2
@@ -43,7 +43,7 @@ 

          self.FILE_HASH = "1234abc"

  

          self.url_task_data = {

-             "task_id": 123,

+             "build_id": 123,

              "owner": self.USER_NAME,

              "project": self.PROJECT_NAME,

  
@@ -51,7 +51,7 @@ 

              "srpm_url": "http://example.com/pkg.src.rpm",

          }

          self.upload_task_data = {

-             "task_id": 124,

+             "build_id": 124,

              "owner": self.USER_NAME,

              "project": self.PROJECT_NAME,

  

file modified
+10 -10
@@ -63,7 +63,7 @@ 

  

  class TestImporter(Base):

      def test_try_to_obtain_new_task_empty(self, mc_get):

-         mc_get.return_value.json.return_value = {"builds": []}

+         mc_get.return_value.json.return_value = []

          assert len(self.importer.try_to_obtain_new_tasks()) is 0

  

      def test_try_to_obtain_handle_error(self, mc_get):
@@ -72,17 +72,17 @@ 

              assert len(self.importer.try_to_obtain_new_tasks()) is 0

  

      def test_try_to_obtain_ok(self, mc_get):

-         mc_get.return_value.json.return_value = {"builds": [self.url_task_data, self.upload_task_data]}

+         mc_get.return_value.json.return_value = [self.url_task_data, self.upload_task_data]

          task = self.importer.try_to_obtain_new_tasks()[0]

-         assert task.task_id == self.url_task_data["task_id"]

+         assert task.build_id == self.url_task_data["build_id"]

          assert task.owner == self.USER_NAME

          assert self.BRANCH in task.branches

          assert task.srpm_url == "http://example.com/pkg.src.rpm"

  

      def test_try_to_obtain_ok_2(self, mc_get):

-         mc_get.return_value.json.return_value = {"builds": [self.upload_task_data, self.url_task_data]}

+         mc_get.return_value.json.return_value = [self.upload_task_data, self.url_task_data]

          task = self.importer.try_to_obtain_new_tasks()[0]

-         assert task.task_id == self.upload_task_data["task_id"]

+         assert task.build_id == self.upload_task_data["build_id"]

          assert task.owner == self.USER_NAME

          assert self.BRANCH in task.branches

          assert task.srpm_url == "http://front/tmp/tmp_2/pkg_2.src.rpm"
@@ -90,9 +90,9 @@ 

      def test_try_to_obtain_new_task_unknown_source_type_ok_3(self, mc_get):

          task_data = copy.deepcopy(self.url_task_data)

          task_data["source_type"] = 999999

-         mc_get.return_value.json.return_value = {"builds": [task_data]}

+         mc_get.return_value.json.return_value = [task_data]

          task = self.importer.try_to_obtain_new_tasks()[0]

-         assert task.task_id == task_data["task_id"]

+         assert task.build_id == task_data["build_id"]

  

      def test_post_back(self, mc_post):

          dd = {"foo": "bar"}
@@ -127,9 +127,9 @@ 

          assert mc_import_package.call_args[0][3] == 'somepath.src.rpm'

  

          print self.importer.post_back_safe.has_calls([

-             call({'task_id': 125, 'pkg_name': 'foo', 'branch': self.BRANCH,

+             call({'build_id': 125, 'pkg_name': 'foo', 'branch': self.BRANCH,

                    'pkg_version': '1.2', 'git_hash': '123', 'repo_name': 'foo'}),

-             call({'task_id': 125, 'pkg_name': 'foo', 'branch': self.BRANCH2,

+             call({'build_id': 125, 'pkg_name': 'foo', 'branch': self.BRANCH2,

                    'pkg_version': '1.2', 'git_hash': '124', 'repo_name': 'foo'})

          ])

  
@@ -150,4 +150,4 @@ 

          self.importer.do_import.side_effect = stop_run

          self.importer.run()

          mc_worker.assert_called_with(target=self.importer.do_import, args=[self.url_task],

-                                      id=self.url_task.task_id, timeout=mock.ANY)

+                                      id=self.url_task.build_id, timeout=mock.ANY)

@@ -69,5 +69,5 @@ 

          pool.terminate_timeouted(callback=send_to_fe)

          w.join()

  

-         send_to_fe.assert_called_with({"task_id": "foo", "error": "import_timeout_exceeded"})

+         send_to_fe.assert_called_with({"build_id": "foo", "error": "import_timeout_exceeded"})

          assert not pool[0].is_alive()

@@ -0,0 +1,28 @@ 

+ """add some indices for faster build selection

+ 

+ Revision ID: 26bf5b9a4dd0

+ Revises: 3576fc77fb31

+ Create Date: 2018-01-24 13:36:54.465668

+ 

+ """

+ 

+ # revision identifiers, used by Alembic.

+ revision = '26bf5b9a4dd0'

+ down_revision = '3576fc77fb31'

+ 

+ from alembic import op

+ import sqlalchemy as sa

+ 

+ 

+ def upgrade():

+     ### commands auto generated by Alembic - please adjust! ###

+     op.create_index('build_filter', 'build', ['source_type', 'canceled'], unique=False)

+     op.create_index('build_order', 'build', ['is_background', 'id'], unique=False)

+     ### end Alembic commands ###

+ 

+ 

+ def downgrade():

+     ### commands auto generated by Alembic - please adjust! ###

+     op.drop_index('build_order', table_name='build')

+     op.drop_index('build_filter', table_name='build')

+     ### end Alembic commands ###

@@ -0,0 +1,26 @@ 

+ """ Added source_status to Build

+ 

+ Revision ID: 3576fc77fb31

+ Revises: 4edb1ca2a13f

+ Create Date: 2018-01-20 10:14:10.741230

+ 

+ """

+ 

+ # revision identifiers, used by Alembic.

+ revision = '3576fc77fb31'

+ down_revision = '4edb1ca2a13f'

+ 

+ from alembic import op

+ import sqlalchemy as sa

+ 

+ 

+ def upgrade():

+     ### commands auto generated by Alembic - please adjust! ###

+     op.add_column('build', sa.Column('source_status', sa.Integer(), nullable=True))

+     ### end Alembic commands ###

+ 

+ 

+ def downgrade():

+     ### commands auto generated by Alembic - please adjust! ###

+     op.drop_column('build', 'source_status')

+     ### end Alembic commands ###

@@ -0,0 +1,84 @@ 

+ """update_db_functions

+ 

+ Revision ID: 465202bfb9ce

+ Revises: 26bf5b9a4dd0

+ Create Date: 2018-01-24 16:38:43.500159

+ 

+ """

+ 

+ # revision identifiers, used by Alembic.

+ revision = '465202bfb9ce'

+ down_revision = '26bf5b9a4dd0'

+ 

+ from alembic import op

+ import sqlalchemy as sa

+ 

+ 

+ def upgrade():

+     query_functions = """

+ CREATE OR REPLACE FUNCTION status_to_order (x integer)

+ RETURNS integer AS $$ BEGIN

+         RETURN CASE WHEN x = 3 THEN 1

+                     WHEN x = 6 THEN 2

+                     WHEN x = 7 THEN 3

+                     WHEN x = 4 THEN 4

+                     WHEN x = 0 THEN 5

+                     WHEN x = 1 THEN 6

+                     WHEN x = 5 THEN 7

+                     WHEN x = 2 THEN 8

+                     WHEN x = 8 THEN 9

+                     WHEN x = 9 THEN 10

+                ELSE x

+         END; END;

+     $$ LANGUAGE plpgsql;

+ 

+ CREATE OR REPLACE FUNCTION order_to_status (x integer)

+ RETURNS integer AS $$ BEGIN

+         RETURN CASE WHEN x = 1 THEN 3

+                     WHEN x = 2 THEN 6

+                     WHEN x = 3 THEN 7

+                     WHEN x = 4 THEN 4

+                     WHEN x = 5 THEN 0

+                     WHEN x = 6 THEN 1

+                     WHEN x = 7 THEN 5

+                     WHEN x = 8 THEN 2

+                     WHEN x = 9 THEN 8

+                     WHEN x = 10 THEN 9

+                ELSE x

+         END; END;

+     $$ LANGUAGE plpgsql;

+     """

+     op.execute(sa.text(query_functions))

+ 

+ 

+ def downgrade():

+     query_functions = """

+ CREATE OR REPLACE FUNCTION status_to_order (x integer)

+ RETURNS integer AS $$ BEGIN

+         RETURN CASE WHEN x = 3 THEN 1

+                     WHEN x = 6 THEN 2

+                     WHEN x = 7 THEN 3

+                     WHEN x = 4 THEN 4

+                     WHEN x = 0 THEN 5

+                     WHEN x = 1 THEN 6

+                     WHEN x = 5 THEN 7

+                     WHEN x = 2 THEN 8

+                ELSE x

+         END; END;

+     $$ LANGUAGE plpgsql;

+ 

+ CREATE OR REPLACE FUNCTION order_to_status (x integer)

+ RETURNS integer AS $$ BEGIN

+         RETURN CASE WHEN x = 1 THEN 3

+                     WHEN x = 2 THEN 6

+                     WHEN x = 3 THEN 7

+                     WHEN x = 4 THEN 4

+                     WHEN x = 5 THEN 0

+                     WHEN x = 6 THEN 1

+                     WHEN x = 7 THEN 5

+                     WHEN x = 8 THEN 2

+                ELSE x

+         END; END;

+     $$ LANGUAGE plpgsql;

+     """

+     op.execute(sa.text(query_functions))

@@ -86,5 +86,10 @@ 

  class UnknownSourceTypeException(Exception):

      pass

  

+ 

  class NoPackageSourceException(Exception):

      pass

+ 

+ 

+ class UnrepeatableBuildException(Exception):

+     pass

@@ -203,7 +203,9 @@ 

          "pending": "Your build is waiting for a builder.",

          "skipped": "This package has already been built previously.",

          "starting": "Trying to acquire and configure builder for task.",

-         "importing": "Package content is being imported into Dist Git."

+         "importing": "Package content is being imported into DistGit.",

+         "waiting": "Task is waiting for something else to finish.",

+         "imported": "Package was successfully imported into DistGit.",

      }

  

      return description_map.get(state, "")

@@ -88,17 +88,19 @@ 

  

  

  class StatusEnum(with_metaclass(EnumType, object)):

-     vals = {"failed": 0,

-             "succeeded": 1,

-             "canceled": 2,

-             "running": 3,

-             "pending": 4,

-             "skipped": 5,  # if there was this package built already

-             "starting": 6,  # build picked by worker but no VM initialized

-             "importing": 7, # SRPM is being imported to dist-git

-             "forked": 8, # build(-chroot) was forked

-             "unknown": 1000, # order_to_status/status_to_order issue

-            }

+     vals = {

+         "failed": 0,     # build failed

+         "succeeded": 1,  # build succeeded

+         "canceled": 2,   # build was canceled

+         "running": 3,    # SRPM or RPM build is running

+         "pending": 4,    # build(-chroot) is waiting to be picked

+         "skipped": 5,    # if there was this package built already

+         "starting": 6,   # build was picked by worker but no VM initialized yet

+         "importing": 7,  # SRPM is being imported into dist-git

+         "forked": 8,     # build(-chroot) was forked

+         "waiting": 9,    # build(-chroot) is waiting for something else to finish

+         "unknown": 1000, # undefined

+     }

  

  

  class ModuleStatusEnum(with_metaclass(EnumType, object)):

@@ -22,7 +22,7 @@ 

  from coprs import models

  from coprs import helpers

  from coprs.constants import DEFAULT_BUILD_TIMEOUT, MAX_BUILD_TIMEOUT

- from coprs.exceptions import MalformedArgumentException, ActionInProgressException, InsufficientRightsException

+ from coprs.exceptions import MalformedArgumentException, ActionInProgressException, InsufficientRightsException, UnrepeatableBuildException

  from coprs.helpers import StatusEnum

  

  from coprs.logic import coprs_logic
@@ -77,12 +77,10 @@ 

          """

          Returns Builds which are waiting to be uploaded to dist git

          """

-         query = (models.Build.query.join(models.BuildChroot)

+         query = (models.Build.query

                   .filter(models.Build.canceled == false())

-                  .filter(models.BuildChroot.status == helpers.StatusEnum("importing"))

-                  .filter(models.Build.srpm_url.isnot(None))

-                 )

-         query = query.order_by(models.BuildChroot.build_id.asc())

+                  .filter(models.Build.source_status == helpers.StatusEnum("importing")))

+         query = query.order_by(models.Build.id.asc())

          return query

  

      @classmethod
@@ -118,16 +116,15 @@ 

          return query

  

      @classmethod

-     def get_waiting_srpm_build_tasks(cls):

-         return (models.Build.query.join(models.BuildChroot)

-                 .filter(models.Build.srpm_url.is_(None))

+     def get_pending_srpm_build_tasks(cls):

+         return (models.Build.query

                  .filter(models.Build.canceled == false())

-                 .filter(models.BuildChroot.status == helpers.StatusEnum("importing"))

+                 .filter(models.Build.source_status == helpers.StatusEnum("pending"))

                  .order_by(models.Build.is_background.asc(), models.Build.id.asc())

                  .all())

  

      @classmethod

-     def get_waiting_build_tasks(cls):

+     def get_pending_build_tasks(cls):

          return (models.BuildChroot.query.join(models.Build)

                  .filter(models.Build.canceled == false())

                  .filter(or_(
@@ -138,7 +135,7 @@ 

                          models.BuildChroot.ended_on.is_(None)

                      )

                  ))

-                 .order_by(models.Build.is_background.asc(), models.BuildChroot.build_id.asc())

+                 .order_by(models.Build.is_background.asc(), models.Build.id.asc())

                  .all())

  

      @classmethod
@@ -216,7 +213,7 @@ 

      @classmethod

      def get_copr_builds_list(cls, copr):

          query_select = """

- SELECT build.id, MAX(package.name) AS pkg_name, build.pkg_version, build.submitted_on,

+ SELECT build.id, build.source_status, MAX(package.name) AS pkg_name, build.pkg_version, build.submitted_on,

      MIN(statuses.started_on) AS started_on, MAX(statuses.ended_on) AS ended_on, order_to_status(MIN(statuses.st)) AS status,

      build.canceled, MIN("group".name) AS group_name, MIN(copr.name) as copr_name, MIN("user".username) as user_name

  FROM build
@@ -251,8 +248,12 @@ 

                      return 6

                  elif x == 5:

                      return 7

-                 elif x == 8:

+                 elif x == 2:

                      return 8

+                 elif x == 8:

+                     return 9

+                 elif x == 9:

+                     return 10

                  return 1000

  

              def sqlite_order_to_status(x):
@@ -271,7 +272,11 @@ 

                  elif x == 7:

                      return 5

                  elif x == 8:

+                     return 2

+                 elif x == 9:

                      return 8

+                 elif x == 10:

+                     return 9

                  return 1000

  

              conn = db.engine.connect()
@@ -352,19 +357,12 @@ 

          git_hashes = {}

  

          if source_build.source_type == helpers.BuildSourceEnum('upload'):

-             # I don't have the source

-             # so I don't want to import anything, just rebuild what's in dist git

-             skip_import = True

- 

-             for chroot in source_build.build_chroots:

-                 if not chroot.git_hash:

-                     # I got an old build from time we didn't use dist git

-                     # So I'll submit it as a new build using it's link

-                     skip_import = False

-                     git_hashes = None

-                     flask.flash("This build is not in Dist Git. Trying to import the package again.")

-                     break

-                 git_hashes[chroot.name] = chroot.git_hash

+             if source_build.repeatable:

+                 skip_import = True

+                 for chroot in source_build.build_chroots:

+                     git_hashes[chroot.name] = chroot.git_hash

+             else:

+                 raise UnrepeatableBuildException("Build sources were not fully imported into CoprDistGit.")

  

          build = cls.create_new(user, copr, source_build.source_type, source_build.source_json, chroot_names,

                                      pkgs=source_build.pkgs, git_hashes=git_hashes, skip_import=skip_import,
@@ -531,6 +529,7 @@ 

              repos=None, chroots=None, timeout=None, enable_net=True,

              git_hashes=None, skip_import=False, background=False, batch=None,

              srpm_url=None):

+ 

          if chroots is None:

              chroots = []

  
@@ -553,6 +552,16 @@ 

              source_type = helpers.BuildSourceEnum("link")

              source_json = json.dumps({"url":pkgs})

  

+         if skip_import and srpm_url:

+             chroot_status = StatusEnum("pending")

+             source_status = StatusEnum("succeeded")

+         elif srpm_url:

+             chroot_status = StatusEnum("waiting")

+             source_status = StatusEnum("importing")

+         else:

+             chroot_status = StatusEnum("waiting")

+             source_status = StatusEnum("pending")

+ 

          build = models.Build(

              user=user,

              pkgs=pkgs,
@@ -560,6 +569,7 @@ 

              repos=repos,

              source_type=source_type,

              source_json=source_json,

+             source_status=source_status,

              submitted_on=int(time.time()),

              enable_net=bool(enable_net),

              is_background=bool(background),
@@ -577,18 +587,13 @@ 

          if not chroots:

              chroots = copr.active_chroots

  

-         if skip_import:

-             status = StatusEnum("pending")

-         else:

-             status = StatusEnum("importing")

- 

          for chroot in chroots:

              git_hash = None

              if git_hashes:

                  git_hash = git_hashes.get(chroot.name)

              buildchroot = models.BuildChroot(

                  build=build,

-                 status=status,

+                 status=chroot_status,

                  mock_chroot=chroot,

                  git_hash=git_hash,

              )
@@ -690,21 +695,40 @@ 

                 }]

              }

          """

-         log.info("Updating build: {} by: {}".format(build.id, upd_dict))

-         if "chroot" in upd_dict:

-             if upd_dict["chroot"] == "srpm-builds":

+         log.info("Updating build {} by: {}".format(build.id, upd_dict))

+ 

+         # update build

+         for attr in ["results", "built_packages", "srpm_url"]:

+             value = upd_dict.get(attr, None)

+             if value:

+                 setattr(build, attr, value)

+ 

+         # update source build status

+         if upd_dict.get("task_id") == build.task_id:

+             if upd_dict.get("status") == StatusEnum("succeeded"):

+                 new_status = StatusEnum("importing")

+             else:

+                 new_status = upd_dict.get("status")

+ 

+             build.source_status = new_status

+ 

+             if new_status == StatusEnum("failed") or \

+                    new_status == StatusEnum("skipped"):

+                 for ch in build.build_chroots:

+                     ch.status = new_status

+                     ch.ended_on = upd_dict.get("ended_on") or time.time()

+                     db.session.add(ch)

+ 

+             if new_status == StatusEnum("failed"):

+                 build.fail_type = helpers.FailTypeEnum("srpm_build_error")

  

-                 if upd_dict.get("status") == StatusEnum("failed") and not build.canceled:

-                     build.fail_type = helpers.FailTypeEnum("srpm_build_error")

-                     for ch in build.build_chroots:

-                         ch.status = helpers.StatusEnum("failed")

-                         ch.ended_on = upd_dict.get("ended_on") or time.time()

-                         db.session.add(ch)

+             db.session.add(build)

+             return

  

+         if "chroot" in upd_dict:

              # update respective chroot status

              for build_chroot in build.build_chroots:

                  if build_chroot.name == upd_dict["chroot"]:

- 

                      if "status" in upd_dict and build_chroot.status not in BuildsLogic.terminal_states:

                          build_chroot.status = upd_dict["status"]

  
@@ -723,11 +747,6 @@ 

                              and all(b.status == StatusEnum("succeeded") for b in build.module.builds)):

                          ActionsLogic.send_build_module(build.copr, build.module)

  

-         for attr in ["results", "built_packages", "srpm_url"]:

-             value = upd_dict.get(attr, None)

-             if value:

-                 setattr(build, attr, value)

- 

          db.session.add(build)

  

      @classmethod
@@ -737,6 +756,7 @@ 

                  "You are not allowed to cancel this build.")

          if not build.cancelable:

              if build.status == StatusEnum("starting"):

+                 # this is not intuitive, that's why we provide more specific message

                  err_msg = "Cannot cancel build {} in state 'starting'".format(build.id)

              else:

                  err_msg = "Cannot cancel build {}".format(build.id)

@@ -36,7 +36,7 @@ 

      @classmethod

      def get_copr_packages_list(cls, copr):

          query_select = """

- SELECT package.name, build.pkg_version, build.submitted_on, package.webhook_rebuild, order_to_status(subquery2.min_order_for_a_build) AS status

+ SELECT package.name, build.pkg_version, build.submitted_on, package.webhook_rebuild, order_to_status(subquery2.min_order_for_a_build) AS status, build.source_status

  FROM package

  LEFT OUTER JOIN (select MAX(build.id) as max_build_id_for_a_package, package_id

    FROM build
@@ -51,9 +51,7 @@ 

  

          if db.engine.url.drivername == "sqlite":

              def sqlite_status_to_order(x):

-                 if x == 0:

-                     return 0

-                 elif x == 3:

+                 if x == 3:

                      return 1

                  elif x == 6:

                      return 2
@@ -61,16 +59,22 @@ 

                      return 3

                  elif x == 4:

                      return 4

-                 elif x == 1:

+                 elif x == 0:

                      return 5

-                 elif x == 5:

+                 elif x == 1:

                      return 6

+                 elif x == 5:

+                     return 7

+                 elif x == 2:

+                     return 8

+                 elif x == 8:

+                     return 9

+                 elif x == 9:

+                     return 10

                  return 1000

  

              def sqlite_order_to_status(x):

-                 if x == 0:

-                     return 0

-                 elif x == 1:

+                 if x == 1:

                      return 3

                  elif x == 2:

                      return 6
@@ -79,9 +83,17 @@ 

                  elif x == 4:

                      return 4

                  elif x == 5:

-                     return 1

+                     return 0

                  elif x == 6:

+                     return 1

+                 elif x == 7:

                      return 5

+                 elif x == 8:

+                     return 2

+                 elif x == 9:

+                     return 8

+                 elif x == 10:

+                     return 9

                  return 1000

  

              conn = db.engine.connect()

@@ -276,7 +276,6 @@ 

          """

          Return list of active mock_chroots of this copr

          """

- 

          return filter(lambda x: x.is_active, self.mock_chroots)

  

      @property
@@ -503,7 +502,9 @@ 

      """

      Representation of one build in one copr

      """

-     __table_args__ = (db.Index('build_canceled', "canceled"), )

+     __table_args__ = (db.Index('build_canceled', "canceled"),

+                       db.Index('build_order', "is_background", "id"),

+                       db.Index('build_filter', "source_type", "canceled"))

  

      id = db.Column(db.Integer, primary_key=True)

      # single url to the source rpm, should not contain " ", "\n", "\t"
@@ -537,6 +538,7 @@ 

      # background builds has lesser priority than regular builds.

      is_background = db.Column(db.Boolean, default=False, server_default="0", nullable=False)

  

+     source_status = db.Column(db.Integer, default=StatusEnum("waiting"))

      srpm_url = db.Column(db.Text)

  

      # relations
@@ -701,78 +703,56 @@ 

          return {b.name: b for b in self.build_chroots}

  

      @property

-     def has_pending_chroot(self):

-         # FIXME bad name

-         # used when checking if the repo is initialized and results can be set

-         # i think this is the only purpose - check

-         return StatusEnum("pending") in self.chroot_states or \

-             StatusEnum("starting") in self.chroot_states

- 

-     @property

-     def has_unfinished_chroot(self):

-         return StatusEnum("pending") in self.chroot_states or \

-             StatusEnum("starting") in self.chroot_states or \

-             StatusEnum("running") in self.chroot_states

- 

-     @property

-     def has_importing_chroot(self):

-         return StatusEnum("importing") in self.chroot_states

- 

-     @property

      def status(self):

          """

-         Return build status according to build status of its chroots

+         Return build status.

          """

          if self.canceled:

              return StatusEnum("canceled")

  

-         for state in ["running", "starting", "importing", "pending", "failed", "succeeded", "skipped", "forked"]:

+         for state in ["running", "starting", "pending", "failed", "succeeded", "skipped", "forked", "waiting"]:

              if StatusEnum(state) in self.chroot_states:

-                 return StatusEnum(state)

+                 if state == "waiting":

+                     return self.source_status

+                 else:

+                     return StatusEnum(state)

+ 

+         return None

  

      @property

      def state(self):

          """

-         Return text representation of status of this build

+         Return text representation of status of this build.

          """

- 

-         if self.status is not None:

+         if self.status != None:

              return StatusEnum(self.status)

- 

          return "unknown"

  

      @property

      def cancelable(self):

          """

          Find out if this build is cancelable.

- 

-         Build is cancelabel only when it's pending (not started)

          """

- 

-         return self.status == StatusEnum("pending") or \

-             self.status == StatusEnum("importing") or \

-             self.status == StatusEnum("running")

+         return not self.finished and self.status != StatusEnum("starting")

  

      @property

      def repeatable(self):

          """

          Find out if this build is repeatable.

  

-         Build is repeatable only if it's not pending, starting or running

+         Build is repeatable only if sources has been imported.

          """

-         return self.status not in [StatusEnum("pending"),

-                                    StatusEnum("starting"),

-                                    StatusEnum("running"),

-                                    StatusEnum("forked")]

+         return self.source_status == StatusEnum("succeeded")

  

      @property

      def finished(self):

          """

          Find out if this build is in finished state.

  

-         Build is finished only if all its build_chroots are in finished state.

+         Build is finished only if all its build_chroots are in finished state or

+         the build was canceled.

          """

-         return all([(chroot.state in ["succeeded", "forked", "canceled", "skipped", "failed"]) for chroot in self.build_chroots])

+         return self.canceled or all([chroot.finished for chroot in self.build_chroots])

  

      @property

      def persistent(self):
@@ -786,7 +766,7 @@ 

      @property

      def src_pkg_name(self):

          """

-         Extract source package name from source name or url

+         Extract source package name from source name or url.

          todo: obsolete

          """

          try:
@@ -983,7 +963,7 @@ 

                           primary_key=True)

      build = db.relationship("Build", backref=db.backref("build_chroots"))

      git_hash = db.Column(db.String(40))

-     status = db.Column(db.Integer, default=StatusEnum("importing"))

+     status = db.Column(db.Integer, default=StatusEnum("waiting"))

  

      started_on = db.Column(db.Integer)

      ended_on = db.Column(db.Integer, index=True)
@@ -1004,10 +984,13 @@ 

          """

          if self.status is not None:

              return StatusEnum(self.status)

- 

          return "unknown"

  

      @property

+     def finished(self):

+         return (self.state in ["succeeded", "forked", "canceled", "skipped", "failed"])

+ 

+     @property

      def task_id(self):

          return "{}-{}".format(self.build_id, self.name)

  

@@ -25,3 +25,7 @@ 

  code {

      white-space: pre-wrap;

  }

+ 

+ span.build-waiting {

+     color: #666666;

+ }

@@ -120,6 +120,8 @@ 

      <span class="pficon pficon-error-circle-o"></span> {{ state }}

    {% elif state == "canceled" %}

      <span class="pficon pficon-close"></span> {{ state }}

+   {% elif state == "waiting" %}

+     <span class="glyphicon glyphicon-hourglass"></span> {{ state }}

    {% else %}

      <span> unknown </span>

    {% endif %}
@@ -130,7 +132,11 @@ 

    {% if build.canceled %}

      {{ build_state_text("canceled") }}

    {% else %}

-     {{ build_state_text(build.status | state_from_num) }}

+     {% if build.status|state_from_num == "waiting" %}

+       {{ build_state_text(build.source_status|state_from_num) }}

+     {% else %}

+       {{ build_state_text(build.status|state_from_num) }}

+     {% endif %}

    {% endif %}

  {% endmacro %}

  

@@ -11,7 +11,8 @@ 

                "succeeded",

                "skipped",

                "failed",

-               "canceled"

+               "canceled",

+               "waiting"

              ]%}

              <dd>

                  {{ build_state_text(state) }}

@@ -174,14 +174,12 @@ 

  {% endmacro %}

  

  {% macro copr_build_repeat_form(build, page, class="") %}

-   {% if build.repeatable %}

      <form class="{{class}}" action="{{ copr_url('coprs_ns.copr_repeat_build', build.copr, build_id=build.id) }}" method="post">

        <input type="hidden" value="page" value="{{ page }}">

        <button class="btn btn-default" type="submit">

          <span class="pficon pficon-restart"></span> Resubmit

        </button>

      </form>

-   {% endif %}

  {% endmacro %}

  

  {% macro copr_build_delete_form(build, page, class="") %}

@@ -23,105 +23,82 @@ 

  

  log = logging.getLogger(__name__)

  

+ 

  @backend_ns.route("/importing/")

- # FIXME I'm commented

- #@misc.backend_authenticated

  def dist_git_importing_queue():

      """

-     Return list of builds that are waiting for dist git to import the sources.

+     Return list of builds that are waiting for dist-git to import the sources.

      """

-     builds_list = []

-     builds_for_import = BuildsLogic.get_build_importing_queue().filter(models.Build.is_background == false()).limit(200).all()

+     tasks = []

+ 

+     builds_for_import = BuildsLogic.get_build_importing_queue().filter(models.Build.is_background == false()).limit(100).all()

      if not builds_for_import:

-         builds_for_import = BuildsLogic.get_build_importing_queue().filter(models.Build.is_background == true()).limit(30)

+         builds_for_import = BuildsLogic.get_build_importing_queue().filter(models.Build.is_background == true()).limit(30).all()

  

-     for task in builds_for_import:

-         copr = task.copr

+     for build in builds_for_import:

          branches = set()

-         for b_ch in task.build_chroots:

+         for b_ch in build.build_chroots:

              branches.add(b_ch.mock_chroot.distgit_branch_name)

  

-         task_dict = {

-             "task_id": task.task_id,

-             "owner": copr.owner_name,

-             "project": copr.name,

+         tasks.append({

+             "build_id": build.id,

+             "owner": build.copr.owner_name,

+             "project": build.copr.name,

              "branches": list(branches),

-             "srpm_url": task.srpm_url,

-         }

-         if task_dict not in builds_list:

-             builds_list.append(task_dict)

- 

-     response_dict = {"builds": builds_list}

+             "srpm_url": build.srpm_url,

+         })

  

-     return flask.jsonify(response_dict)

+     return flask.jsonify(tasks)

  

  

  @backend_ns.route("/import-completed/", methods=["POST", "PUT"])

  @misc.backend_authenticated

  def dist_git_upload_completed():

-     """

-     Mark BuildChroot in a Build as uploaded, which means:

-         - set it to pending state

-         - set BuildChroot.git_hash

-         - if it's the last BuildChroot in a Build:

-             - delete local source

-     BuildChroot is identified with task_id which is build id + git branch name

-         - For example: 56-f22 -> build 55, chroots fedora-22-*

-     """

-     result = {"updated": False}

- 

-     if "task_id" in flask.request.json and 'branch' in flask.request.json:

-         app.logger.debug(flask.request.data)

-         task_id = flask.request.json["task_id"]

-         branch = flask.request.json["branch"]

-         build_chroots = BuildsLogic.get_buildchroots_by_build_id_and_branch(task_id, branch)

-         build = build_chroots[0].build

- 

-         # Is it OK?

-         if "git_hash" in flask.request.json and "repo_name" in flask.request.json:

-             git_hash = flask.request.json["git_hash"]

-             pkg_name = flask.request.json["pkg_name"]

-             pkg_version = flask.request.json["pkg_version"]

- 

-             # Now I need to assign a package to this build

-             if not PackagesLogic.get(build.copr.id, pkg_name).first():

-                 try:

-                     package = PackagesLogic.add(build.copr.user, build.copr, pkg_name, build.source_type, build.source_json)

-                     db.session.add(package)

-                     db.session.commit()

-                 except (sqlalchemy.exc.IntegrityError, exceptions.DuplicateException) as e:

-                     db.session.rollback()

- 

-             package = PackagesLogic.get(build.copr.id, pkg_name).first()

-             build.package_id = package.id

-             build.pkg_version = pkg_version

- 

-             for ch in build_chroots:

-                 if ch.status == helpers.StatusEnum("importing"):

-                     ch.status = helpers.StatusEnum("pending")

-                 ch.git_hash = git_hash

- 

-         # Failed?

-         elif "error" in flask.request.json:

-             error_type = flask.request.json["error"]

+     app.logger.debug(flask.request.json)

+     build_id = flask.request.json.get("build_id")

+     pkg_name = flask.request.json.get("pkg_name")

+     pkg_version = flask.request.json.get("pkg_evr")

+ 

+     try:

+         build = ComplexLogic.get_build_safe(build_id)

+     except ObjectNotFound:

+         return flask.jsonify({"updated": False})

+ 

+     collected_branch_chroots = []

+     for branch, git_hash in flask.request.json.get("branch_commits", {}).items():

+         branch_chroots = BuildsLogic.get_buildchroots_by_build_id_and_branch(build_id, branch)

  

+         if not PackagesLogic.get(build.copr.id, pkg_name).first():

              try:

-                 build.fail_type = helpers.FailTypeEnum(error_type)

-             except KeyError:

-                 build.fail_type = helpers.FailTypeEnum("unknown_error")

+                 package = PackagesLogic.add(build.copr.user, build.copr, pkg_name, build.source_type, build.source_json)

+                 db.session.add(package)

+                 db.session.commit()

+             except (sqlalchemy.exc.IntegrityError, exceptions.DuplicateException) as e:

+                 db.session.rollback()

  

-             for ch in build_chroots:

-                 ch.status = helpers.StatusEnum("failed")

+         package = PackagesLogic.get(build.copr.id, pkg_name).first()

+         build.package_id = package.id

+         build.pkg_version = pkg_version

  

-         # is it the last chroot?

-         if not build.has_importing_chroot:

-             BuildsLogic.delete_local_source(build)

+         for ch in branch_chroots:

+             ch.status = StatusEnum("pending")

+             ch.git_hash = git_hash

+             db.session.add(ch)

+             collected_branch_chroots.append((ch.task_id))

  

-         db.session.commit()

+     final_source_status = StatusEnum("succeeded")

+     for ch in build.build_chroots:

+         if ch.task_id not in collected_branch_chroots:

+             final_source_status = StatusEnum("failed")

+             ch.status = StatusEnum("failed")

+             db.session.add(ch)

  

-         result.update({"updated": True})

+     build.source_status = final_source_status

+     db.session.add(build)

+     db.session.commit()

  

-     return flask.jsonify(result)

+     BuildsLogic.delete_local_source(build)

+     return flask.jsonify({"updated": True})

  

  

  def get_build_record(task):
@@ -168,8 +145,8 @@ 

  

      try:

          build_record = {

+             "task_id": task.task_id,

              "build_id": task.id,

-             "task_id": task.id,

              "project_owner": task.copr.owner_name,

              "project_name": task.copr.name,

              "source_type": task.source_type,
@@ -182,9 +159,8 @@ 

      return build_record

  

  

- @backend_ns.route("/waiting-action/")

- #@misc.backend_authenticated

- def waiting_action():

+ @backend_ns.route("/pending-action/")

+ def pending_action():

      """

      Return a single action.

      """
@@ -197,14 +173,13 @@ 

      return flask.jsonify(action_record)

  

  

- @backend_ns.route("/waiting-jobs/")

- #@misc.backend_authenticated

- def waiting_jobs():

+ @backend_ns.route("/pending-jobs/")

+ def pending_jobs():

      """

      Return the job queue.

      """

-     build_records = ([get_build_record(task) for task in BuildsLogic.get_waiting_build_tasks()] +

-                      [get_srpm_build_record(task) for task in BuildsLogic.get_waiting_srpm_build_tasks()])

+     build_records = ([get_build_record(task) for task in BuildsLogic.get_pending_build_tasks()] +

+                      [get_srpm_build_record(task) for task in BuildsLogic.get_pending_srpm_build_tasks()])

      log.info('Selected build records: {}'.format(build_records))

      return flask.jsonify(build_records)

  
@@ -273,33 +248,25 @@ 

  @misc.backend_authenticated

  def starting_build():

      """

-     Check if the build is not cancelled and set it to running state

+     Check if the build is not cancelled and set it to starting state

      """

+     data = flask.request.json

  

-     result = {"can_start": False}

- 

-     if "build_id" in flask.request.json and "chroot" in flask.request.json:

-         build = ComplexLogic.get_build_safe(flask.request.json["build_id"])

-         chroot = flask.request.json.get("chroot")

+     try:

+         build = ComplexLogic.get_build_safe(data.get('build_id'))

+     except ObjectNotFound:

+         return flask.jsonify({"can_start": False})

  

-         if build and chroot and not build.canceled:

-             log.info("mark build {} chroot {} as starting".format(build.id, chroot))

-             BuildsLogic.update_state_from_dict(build, {

-                 "chroot": chroot,

-                 "status": StatusEnum("starting")

-             })

-             db.session.commit()

-             result["can_start"] = True

+     if build.canceled:

+         return flask.jsonify({"can_start": False})

  

-     return flask.jsonify(result)

+     BuildsLogic.update_state_from_dict(build, data)

+     return flask.jsonify({"can_start": True})

  

  

  @backend_ns.route("/reschedule_all_running/", methods=["POST"])

  @misc.backend_authenticated

  def reschedule_all_running():

-     """

-     Add-hoc handle. Remove after implementation of persistent task handling in copr-backend

-     """

      to_reschedule = \

          BuildsLogic.get_build_tasks(StatusEnum("starting")).all() + \

          BuildsLogic.get_build_tasks(StatusEnum("running")).all()

@@ -21,7 +21,8 @@ 

  from coprs.views.coprs_ns import coprs_ns

  

  from coprs.exceptions import (ActionInProgressException,

-                               InsufficientRightsException,)

+                               InsufficientRightsException,

+                               UnrepeatableBuildException)

  

  

  @coprs_ns.route("/build/<int:build_id>/")
@@ -158,7 +159,7 @@ 

          try:

              create_new_build_factory(**build_options)

              db.session.commit()

-         except (ActionInProgressException, InsufficientRightsException) as e:

+         except (ActionInProgressException, InsufficientRightsException, UnrepeatableBuildException) as e:

              db.session.rollback()

              flask.flash(str(e), "error")

          else:

@@ -238,13 +238,13 @@ 

              copr=self.c2, name="goodbye-world", source_type=0)

  

          self.b1 = models.Build(

-             copr=self.c1, package=self.p1, user=self.u1, submitted_on=50, srpm_url="http://somesrpm")

+             copr=self.c1, package=self.p1, user=self.u1, submitted_on=50, srpm_url="http://somesrpm", source_status=StatusEnum("importing"))

          self.b2 = models.Build(

-             copr=self.c1, package=self.p1, user=self.u2, submitted_on=10, srpm_url="http://somesrpm")

+             copr=self.c1, package=self.p1, user=self.u2, submitted_on=10, srpm_url="http://somesrpm", source_status=StatusEnum("importing"))

          self.b3 = models.Build(

-             copr=self.c2, package=self.p2, user=self.u2, submitted_on=10, srpm_url="http://somesrpm")

+             copr=self.c2, package=self.p2, user=self.u2, submitted_on=10, srpm_url="http://somesrpm", source_status=StatusEnum("importing"))

          self.b4 = models.Build(

-             copr=self.c2, package=self.p2, user=self.u2, submitted_on=100, srpm_url="http://somesrpm")

+             copr=self.c2, package=self.p2, user=self.u2, submitted_on=100, srpm_url="http://somesrpm", source_status=StatusEnum("succeeded"))

  

          self.basic_builds = [self.b1, self.b2, self.b3, self.b4]

          self.b1_bc = []

@@ -9,7 +9,7 @@ 

  

      def test_no_waiting_builds(self):

          assert b'[]' in self.tc.get(

-             "/backend/waiting-jobs/", headers=self.auth_header).data

+             "/backend/pending-jobs/", headers=self.auth_header).data

  

      def test_waiting_build_only_lists_not_started_or_ended(

              self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
@@ -23,7 +23,7 @@ 

  

          self.db.session.commit()

  

-         r = self.tc.get("/backend/waiting-jobs/", headers=self.auth_header)

+         r = self.tc.get("/backend/pending-jobs/", headers=self.auth_header)

          assert json.loads(r.data.decode("utf-8")) == []

  

          for build_chroot in self.b2_bc:
@@ -32,7 +32,7 @@ 

  

          self.db.session.commit()

  

-         r = self.tc.get("/backend/waiting-jobs/", headers=self.auth_header)

+         r = self.tc.get("/backend/pending-jobs/", headers=self.auth_header)

          assert json.loads(r.data.decode("utf-8")) != []

  

  
@@ -43,7 +43,7 @@ 

                  build_chroot.status = 4  # pending

          self.db.session.commit()

  

-         r = self.tc.get("/backend/waiting-jobs/")

+         r = self.tc.get("/backend/pending-jobs/")

          data = json.loads(r.data.decode("utf-8"))

          assert data[0]["build_id"] == 3

  
@@ -165,7 +165,7 @@ 

  

      def test_no_waiting_actions(self):

          assert b'null' in self.tc.get(

-             "/backend/waiting-action/", headers=self.auth_header).data

+             "/backend/pending-action/", headers=self.auth_header).data

  

      def test_waiting_actions_only_lists_not_started_or_ended(

              self, f_users, f_coprs, f_actions, f_db):
@@ -175,7 +175,7 @@ 

  

          self.db.session.commit()

  

-         r = self.tc.get("/backend/waiting-action/", headers=self.auth_header)

+         r = self.tc.get("/backend/pending-action/", headers=self.auth_header)

          assert json.loads(r.data.decode("utf-8")) == None

  

          for a in [self.a1, self.a2, self.a3]:
@@ -183,7 +183,7 @@ 

              self.db.session.add(a)

  

          self.db.session.commit()

-         r = self.tc.get("/backend/waiting-action/", headers=self.auth_header)

+         r = self.tc.get("/backend/pending-action/", headers=self.auth_header)

          assert json.loads(r.data.decode("utf-8")) != None

  

  
@@ -267,7 +267,7 @@ 

  

          r = self.tc.get("/backend/importing/")

          data = json.loads(r.data.decode("utf-8"))

-         assert data["builds"][0]["srpm_url"] == "bar"

+         assert data[0]["srpm_url"] == "bar"

  

      def test_importing_queue_multiple_bg(self, f_users, f_coprs, f_mock_chroots, f_db):

          BuildsLogic.create_new_from_url(self.u1, self.c1, "foo", background=True)
@@ -275,4 +275,4 @@ 

  

          r = self.tc.get("/backend/importing/")

          data = json.loads(r.data.decode("utf-8"))

-         assert data["builds"][0]["srpm_url"] == "foo"

+         assert data[0]["srpm_url"] == "foo"

This PR additionally does the following:

  • adds additional check in backend for duplicate builds
  • adjusts build "resubmit" logic
  • slightly changes starting-build interface

I stumbled upon those things during the implementation of source_status and decided to do them in the same batch.

Anyway, this PR adds tracking of the srpm build task status by adding a source_status field to the Build model. That means that if an srpm build is running on a builder, it is now correctly reflected in Build.status. Before, the build status was "importing" all the time (even when nothing was being imported because the srpm needed to be built first). Now, the build status starts as "importing" only if an already-built srpm is provided by the user; otherwise it goes through the pending->running->importing->imported process if all goes well. When it reaches "imported" (or "failed" if the srpm import fails), the build_chroot statuses are switched from the initial "waiting" state (which is also a new state, together with "imported") to "pending", and the build status starts to be inferred from the build_chroot statuses — this is the slightly tricky part: first, build.status follows build.source_status, and when build.source_status is switched to "imported" or "failed", build.status starts to be inferred from the build_chroot statuses (see models.py, line 704, for the definition of the Build.status property).

Unit tests should be adjusted to test this whole state-toggling behavior more thoroughly. So far, I only managed to make them pass.

There should be an index on build(source_status, canceled).

I also moved self.vm_manager.release_vm(self.vm.vm_name) in worker.py into the finally: clause of the Worker.run method. It does a better job there than it did in the do_job method, because there were cases where a crash occurred before the try-except statement — an exception was raised but the allocated VM was not released (this was happening due to the multi-VM-per-srpm-build race conditions, but still, the release is better placed in the run method, above all the do_job logic).

...which reminds me that I also fixed build reattaching, which didn't really work for srpm builds (it worked for rpm builds) because an int (build.id) was being compared with a string (the task id fetched from redis, which seems to always return strings?).

rebased onto b9326e8

6 years ago

2 new commits added

  • [frontend] fixed status_to_order, order_to_status functions, added waiting icon
  • [frontend] add indeces for faster build selection
6 years ago

Don't you want to have a separate enum for each field (or keep using the old status column)? This way it asks for inconsistencies...

Don't you want to have separate enum for each field (or keep using the old status columnt)
No, not really. The statuses are exactly the same except for "imported" vs. "succeeded", and thinking about it, "imported" could have been just "succeeded"... Oh, there's also the "importing" state. The redundancy here comes from the fact that the whole importing stage is very likely redundant. I originally wanted to make two separate enums, but seeing how many places the change would need to touch... I just dropped that idea.

build.update({k: v for k, v in upd_dict.items() if k in THE_LIST})

1 new commit added

  • [frontend] remove "imported" state (use just "succeeded")
6 years ago

Merging now even though (as reported) there is this "hack" with reusing StatusEnum for both source build and rpm build stages. That's okay except for the 'importing' stage, which can only belong to the source stage. We will need to address this hack in a next PR.

Pull-Request has been merged by clime

6 years ago
Metadata
Changes Summary 30
+1 -0
file changed
backend/backend/constants.py
+1 -1
file changed
backend/backend/daemons/action_dispatcher.py
+19 -13
file changed
backend/backend/daemons/build_dispatcher.py
+3 -7
file changed
backend/backend/daemons/worker.py
+1 -2
file changed
backend/backend/frontend.py
+1 -0
file changed
backend/backend/mockremote/builder.py
+3 -3
file changed
backend/tests/test_frontend.py
+2 -2
file changed
dist-git/dist_git/import_task.py
+9 -34
file changed
dist-git/dist_git/importer.py
+1 -1
file changed
dist-git/dist_git/process_pool.py
+2 -2
file changed
dist-git/tests/base.py
+10 -10
file changed
dist-git/tests/test_importer.py
+1 -1
file changed
dist-git/tests/test_multiprocessing.py
+28
file added
frontend/coprs_frontend/alembic/schema/versions/26bf5b9a4dd0_indeces_for_faster_build_selection.py
+26
file added
frontend/coprs_frontend/alembic/schema/versions/3576fc77fb31_added_source_status_to_build.py
+84
file added
frontend/coprs_frontend/alembic/schema/versions/465202bfb9ce_update_db_functions.py
+5 -0
file changed
frontend/coprs_frontend/coprs/exceptions.py
+3 -1
file changed
frontend/coprs_frontend/coprs/filters.py
+13 -11
file changed
frontend/coprs_frontend/coprs/helpers.py
+68 -48
file changed
frontend/coprs_frontend/coprs/logic/builds_logic.py
+22 -10
file changed
frontend/coprs_frontend/coprs/logic/packages_logic.py
+26 -43
file changed
frontend/coprs_frontend/coprs/models.py
+4 -0
file changed
frontend/coprs_frontend/coprs/static/css/custom-styles.css
+7 -1
file changed
frontend/coprs_frontend/coprs/templates/_helpers.html
+2 -1
file changed
frontend/coprs_frontend/coprs/templates/coprs/detail/_build_states.html
+0 -2
file changed
frontend/coprs_frontend/coprs/templates/coprs/detail/_builds_forms.html
+70 -103
file changed
frontend/coprs_frontend/coprs/views/backend_ns/backend_general.py
+3 -2
file changed
frontend/coprs_frontend/coprs/views/coprs_ns/coprs_builds.py
+4 -4
file changed
frontend/coprs_frontend/tests/coprs_test_case.py
+9 -9
file changed
frontend/coprs_frontend/tests/test_views/test_backend_ns/test_backend_general.py