#311 Generate ComposeInfo which is valid also for raw_configs.
Merged 4 years ago by lsedlar. Opened 4 years ago by jkaluza.
jkaluza/odcs compose-info  into  master

file modified
+8
@@ -124,6 +124,12 @@ 

      '--lookaside-repo', default=[], action='append',

      metavar="lookaside_repos",

      help="Koji tag with module builds.")

+ create_parser.add_argument(

+     '--label', default=None,

+     help="Label for raw_config compose.")

+ create_parser.add_argument(

+     '--compose-type', default=None,

+     help="Compose type for raw_config compose.")

  

  wait_parser = subparsers.add_parser(

      'wait', help='wait for a compose to finish')
@@ -218,6 +224,8 @@ 

              module_defaults_url=args.module_defaults_url,

              module_defaults_commit=args.module_defaults_commit,

              lookaside_repos=args.lookaside_repo,

+             label=args.label,

+             compose_type=args.compose_type

          )

      elif args.command == "wait":

          result = {"id": int(args.compose_id)}

file modified
+5 -1
@@ -214,7 +214,7 @@ 

                      sigkeys=None, koji_event=None, results=None,

                      arches=None, builds=None, modular_koji_tags=None,

                      module_defaults_url=None, module_defaults_commit=None,

-                     lookaside_repos=None):

+                     lookaside_repos=None, label=None, compose_type=None):

          """Request a new compose

  

          :param str source: from where to grab and make new compose, different
@@ -263,6 +263,10 @@ 

              request_data['source']['module_defaults_commit'] = module_defaults_commit

          if lookaside_repos:

              request_data['lookaside_repos'] = lookaside_repos

+         if label:

+             request_data['label'] = label

+         if compose_type:

+             request_data['compose_type'] = compose_type

          if seconds_to_live is not None:

              request_data['seconds-to-live'] = seconds_to_live

          if flags:

file modified
+10
@@ -46,6 +46,11 @@ 

  *arches* - ``(white-space separated list of strings)``

    List of architectures the compose is generated for. The strings to express particular architecture are the same as the ones used by the Koji build system.

  

+ .. _compose_type:

+ 

+ *compose_type* - ``(string)``

+     Type of the compose when generating raw_config compose. Can be "test", "nightly", "ci", "production".

+ 

  .. _builds:

  

  *builds* - ``(white-space separated list of strings or null)``
@@ -72,6 +77,11 @@ 

  *koji_event* - ``(number or null)``

      The Koji event defining the point in Koji history when the compose was generated. It can be ``null`` if source type does not relate to Koji tag.

  

+ .. _label:

+ 

+ *label* - ``(string)``

+     Compose label when generating raw_config compose.

+ 

  .. _lookaside_repos:

  

  *lookaside_repos* - ``(white-space separated list of strings or null)``

@@ -270,7 +270,8 @@ 

      """

      search_query = dict()

  

-     for key in ['owner', 'source_type', 'source', 'state', 'koji_task_id']:

+     for key in ['owner', 'source_type', 'source', 'state', 'koji_task_id',

+                 'pungi_compose_id', 'compose_type', 'label']:

          if flask_request.args.get(key, None):

              search_query[key] = flask_request.args[key]

  

@@ -0,0 +1,26 @@ 

+ """Add compose_type, label and pungi_compose_id columns.

+ 

+ Revision ID: cd0781bbdab1

+ Revises: de0a86d7de49

+ Create Date: 2020-01-09 11:14:30.378081

+ 

+ """

+ 

+ # revision identifiers, used by Alembic.

+ revision = 'cd0781bbdab1'

+ down_revision = 'de0a86d7de49'

+ 

+ from alembic import op

+ import sqlalchemy as sa

+ 

+ 

+ def upgrade():

+     op.add_column('composes', sa.Column('compose_type', sa.String(), nullable=True))

+     op.add_column('composes', sa.Column('label', sa.String(), nullable=True))

+     op.add_column('composes', sa.Column('pungi_compose_id', sa.String(), nullable=True))

+ 

+ 

+ def downgrade():

+     op.drop_column('composes', 'pungi_compose_id')

+     op.drop_column('composes', 'label')

+     op.drop_column('composes', 'compose_type')

file modified
+18 -1
@@ -149,13 +149,20 @@ 

      multilib_method = db.Column(db.Integer)

      # White-space separated lookaside repository URLs.

      lookaside_repos = db.Column(db.String, nullable=True)

+     # Compose label stored to ComposeInfo for Raw config composes.

+     label = db.Column(db.String, nullable=True)

+     # Compose type stored to ComposeInfo for Raw config composes.

+     compose_type = db.Column(db.String, nullable=True)

+     # Compose id as generated by Pungi for its ComposeInfo metadata.

+     pungi_compose_id = db.Column(db.String, nullable=True)

  

      @classmethod

      def create(cls, session, owner, source_type, source, results,

                 seconds_to_live, packages=None, flags=0, sigkeys=None,

                 koji_event=None, arches=None, multilib_arches=None,

                 multilib_method=None, builds=None, lookaside_repos=None,

-                modular_koji_tags=None, module_defaults_url=None):

+                modular_koji_tags=None, module_defaults_url=None,

+                label=None, compose_type=None):

          now = datetime.utcnow()

          compose = cls(

              owner=owner,
@@ -176,6 +183,8 @@ 

              lookaside_repos=lookaside_repos,

              modular_koji_tags=modular_koji_tags,

              module_defaults_url=module_defaults_url,

+             label=label,

+             compose_type=compose_type,

          )

          session.add(compose)

          return compose
@@ -210,6 +219,11 @@ 

              lookaside_repos=compose.lookaside_repos,

              modular_koji_tags=compose.modular_koji_tags,

              module_defaults_url=compose.module_defaults_url,

+             label=compose.label,

+             compose_type=compose.compose_type,

+             # Set pungi_compose_id to None, because it is regenerated once

+             # this copied Compose is started.

+             pungi_compose_id=None,

          )

          session.add(compose)

          return compose
@@ -335,6 +349,9 @@ 

              'lookaside_repos': self.lookaside_repos,

              'modular_koji_tags': self.modular_koji_tags,

              'module_defaults_url': self.module_defaults_url,

+             'label': self.label,

+             'compose_type': self.compose_type,

+             'pungi_compose_id': self.pungi_compose_id,

          }

  

      @staticmethod

file modified
+39 -17
@@ -28,10 +28,12 @@ 

  import jinja2

  import time

  from productmd.composeinfo import ComposeInfo

+ from kobo.conf import PyConfigParser

  

  import odcs.server.utils

- from odcs.server import conf, log

+ from odcs.server import conf, log, db

  from odcs.server import comps

+ from odcs.server.models import Compose

  from odcs.common.types import (

      PungiSourceType, COMPOSE_RESULTS, MULTILIB_METHODS,

      INVERSE_PUNGI_SOURCE_TYPE_NAMES, COMPOSE_FLAGS)
@@ -336,7 +338,7 @@ 

              pungi_cmd += ["--old-composes", self.old_compose]

          return pungi_cmd

  

-     def _prepare_compose_dir(self, conf_topdir, targetdir):

+     def _prepare_compose_dir(self, compose, conf_topdir, targetdir):

          """

          Creates the compose directory and returns the full path to it.

          """
@@ -346,30 +348,45 @@ 

          compose_dir = os.path.join(targetdir, compose_id)

          makedirs(compose_dir)

  

-         # Generate ComposeInfo which is needed for Pungi.

-         # These variables can be hardcoded, because we only generate composes

-         # like this in ODCS.

+         conf = PyConfigParser()

+         conf.load_from_file(os.path.join(conf_topdir, "pungi.conf"))

If there's a syntax error in the config, there will be an exception here. Will it be captured and reported properly? Maybe it should be wrapped in a RuntimeError?

This is all handled in the main try/except block in generate_compose: https://pagure.io/odcs/blob/master/f/server/odcs/server/backend.py#_776, so this should be OK.

+ 

          ci = ComposeInfo()

-         ci.release.name = "odcs-%s" % self.compose_id

-         ci.release.short = "odcs-%s" % self.compose_id

-         ci.release.version = "1"

-         ci.release.is_layered = False

-         ci.release.type = "ga"

-         ci.release.internal = False

-         ci.compose.id = compose_id

-         ci.compose.label = None

-         ci.compose.type = "nightly"

+         ci.release.name = conf["release_name"]

+         ci.release.short = conf["release_short"]

+         ci.release.version = conf["release_version"]

+         ci.release.is_layered = True if conf.get("base_product_name", "") else False

+         ci.release.type = conf.get("release_type", "ga").lower()

+         ci.release.internal = bool(conf.get("release_internal", False))

+         if ci.release.is_layered:

+             ci.base_product.name = conf["base_product_name"]

+             ci.base_product.short = conf["base_product_short"]

+             ci.base_product.version = conf["base_product_version"]

+             ci.base_product.type = conf.get("base_product_type", "ga").lower()

+ 

+         ci.compose.label = compose.label

+         ci.compose.type = compose.compose_type or "nightly"

          ci.compose.date = compose_date

          ci.compose.respin = 0

  

+         while True:

+             ci.compose.id = ci.create_compose_id()

+             existing_compose = Compose.query.filter(

+                 Compose.pungi_compose_id == ci.compose.id).first()

+             if not existing_compose:

+                 break

+             ci.compose.respin += 1

+ 

          # Dump the compose info to work/global/composeinfo-base.json.

          work_dir = os.path.join(compose_dir, "work", "global")

          makedirs(work_dir)

          ci.dump(os.path.join(work_dir, "composeinfo-base.json"))

  

+         compose.pungi_compose_id = ci.compose.id

+ 

          return compose_dir

  

-     def run_locally(self):

+     def run_locally(self, compose):

          """

          Runs local Pungi compose.

          """
@@ -377,9 +394,14 @@ 

          try:

              td = tempfile.mkdtemp()

              self._write_cfgs(td)

-             compose_dir = self._prepare_compose_dir(td, conf.target_dir)

+             compose_dir = self._prepare_compose_dir(compose, td, conf.target_dir)

              pungi_cmd = self.get_pungi_cmd(td, conf.target_dir, compose_dir)

  

+             # Commit the session to ensure that all the `compose` changes are

+             # stored in the database before executing the compose and are not just

+             # cached locally in SQLAlchemy.

+             db.session.commit()

+ 

              log_out_path = os.path.join(compose_dir, "pungi-stdout.log")

              log_err_path = os.path.join(compose_dir, "pungi-stderr.log")

  
@@ -405,7 +427,7 @@ 

          :param models.Compose compose: Compose this Pungi process is running

              for.

          """

-         self.run_locally()

+         self.run_locally(compose)

  

  

  class PungiLogs(object):

file modified
+8 -1
@@ -244,6 +244,8 @@ 

          :jsonparam list multilib_arches: List of :ref:`multilib arches<multilib_arches>`.

          :jsonparam string multilib_method: List defining the :ref:`multilib method<multilib_method>`.

          :jsonparam list lookaside_repos: List of :ref:`lookaside_repos<lookaside_repos>`.

+         :jsonparam string label: String defining the :ref:`label<label>`.

+         :jsonparam string compose_type: String defining the :ref:`compose_type<compose_type>`.

          :jsonparam object source: The JSON object defining the source of compose.

          :jsonparam string source["type"]: String defining the :ref:`source type<source_type>`.

          :jsonparam string source["source"]: String defining the :ref:`source<source>`.
@@ -401,6 +403,9 @@ 

          elif module_defaults_url and module_defaults_commit:

              module_defaults = "%s %s" % (module_defaults_url, module_defaults_commit)

  

+         label = data.get("label", None)

+         compose_type = data.get("compose_type", "nightly")

+ 

          raise_if_input_not_allowed(

              source_types=source_type, sources=source, results=results,

              flags=flags, arches=arches)
@@ -414,7 +419,9 @@ 

              builds=builds,

              lookaside_repos=lookaside_repos,

              modular_koji_tags=modular_koji_tags,

-             module_defaults_url=module_defaults)

+             module_defaults_url=module_defaults,

+             label=label,

+             compose_type=compose_type)

          db.session.add(compose)

          # Flush is needed, because we use `before_commit` SQLAlchemy event to

          # send message and before_commit can be called before flush and

file modified
+5 -2
@@ -74,7 +74,10 @@ 

                           'multilib_method': 0,

                           'lookaside_repos': None,

                           'modular_koji_tags': None,

-                          'module_defaults_url': None}

+                          'module_defaults_url': None,

+                          'label': None,

+                          'compose_type': None,

+                          'pungi_compose_id': None}

          self.assertEqual(c.json(), expected_json)

  

      def test_create_copy(self):
@@ -112,7 +115,7 @@ 

              if c.name in ["id", "state", "state_reason", "time_to_expire",

                            "time_done", "time_submitted", "time_removed",

                            "removed_by", "reused_id", "koji_task_id",

-                           "time_started"]:

+                           "time_started", "pungi_compose_id"]:

                  assertMethod = self.assertNotEqual

              else:

                  assertMethod = self.assertEqual

file modified
+46 -3
@@ -24,6 +24,7 @@ 

  import shutil

  import tempfile

  import unittest

+ import time

  

  from mock import patch, MagicMock, mock_open

  from kobo.conf import PyConfigParser
@@ -339,7 +340,7 @@ 

              self.assertEqual(pungi_cfg.source, "foo:1:1:1 bar-devel:1:1:1")

  

  

- class TestPungi(unittest.TestCase):

+ class TestPungi(ModelsBaseTest):

  

      def setUp(self):

          super(TestPungi, self).setUp()
@@ -348,9 +349,19 @@ 

              makedirs(dest)

              makedirs(os.path.join(dest, "another"))

              with open(os.path.join(dest, "pungi.conf"), "w") as fd:

-                 fd.write("fake pungi conf 1")

+                 lines = [

+                     'release_name = "fake pungi conf 1"',

+                     'release_short = "compose-1"',

+                     'release_version = "10"',

+                 ]

+                 fd.writelines(lines)

              with open(os.path.join(dest, "another", "pungi.conf"), "w") as fd:

-                 fd.write("fake pungi conf 2")

+                 lines = [

+                     'release_name = "fake pungi conf 2"',

+                     'release_short = "compose-2"',

+                     'release_version = "10"',

+                 ]

+                 fd.writelines(lines)

  

          self.patch_clone_repo = patch("odcs.server.pungi.clone_repo")

          self.clone_repo = self.patch_clone_repo.start()
@@ -363,6 +374,7 @@ 

          self.ci_dump = self.patch_ci_dump.start()

  

          self.compose = MagicMock()

+         self.compose.compose_type = "nightly"

  

      def tearDown(self):

          super(TestPungi, self).tearDown()
@@ -422,6 +434,37 @@ 

          self.clone_repo.assert_called_once_with(

              'http://localhost/test.git', AnyStringWith("/raw_config_repo"),

              commit='hash')

+         compose_date = time.strftime("%Y%m%d", time.localtime())

+         self.assertEqual(self.compose.pungi_compose_id,

+                          "compose-1-10-%s.n.0" % compose_date)

+ 

+     @patch("odcs.server.utils.execute_cmd")

+     def test_pungi_run_raw_config_respin(self, execute_cmd):

+         compose = Compose.create(

+             db.session, "me", PungiSourceType.RAW_CONFIG, "foo",

+             COMPOSE_RESULTS["repository"], 3600)

+ 

+         def mocked_execute_cmd(*args, **kwargs):

+             topdir = kwargs["cwd"]

+             with open(os.path.join(topdir, "pungi.conf"), "r") as f:

+                 data = f.read()

+                 self.assertTrue("fake pungi conf 1" in data)

+         execute_cmd.side_effect = mocked_execute_cmd

+ 

+         fake_raw_config_urls = {

+             'pungi.conf': {

+                 "url": "http://localhost/test.git",

+                 "config_filename": "pungi.conf",

+             }

+         }

+         with patch.object(conf, 'raw_config_urls', new=fake_raw_config_urls):

+             pungi = Pungi(1, RawPungiConfig('pungi.conf#hash'))

+             pungi.run(compose)

+             pungi.run(compose)

+ 

+         compose_date = time.strftime("%Y%m%d", time.localtime())

+         self.assertEqual(compose.pungi_compose_id,

+                          "compose-1-10-%s.n.1" % compose_date)

  

      @patch("odcs.server.utils.execute_cmd")

      def test_pungi_run_raw_config_subpath(self, execute_cmd):

file modified
+13 -3
@@ -308,7 +308,10 @@ 

                           'multilib_method': 0,

                           'lookaside_repos': '',

                           'modular_koji_tags': None,

-                          'module_defaults_url': None}

+                          'module_defaults_url': None,

+                          'label': None,

+                          'compose_type': 'nightly',

+                          'pungi_compose_id': None}

          self.assertEqual(data, expected_json)

  

          db.session.expire_all()
@@ -1085,7 +1088,10 @@ 

                           'multilib_method': 0,

                           'lookaside_repos': '',

                           'modular_koji_tags': None,

-                          'module_defaults_url': None}

+                          'module_defaults_url': None,

+                          'label': None,

+                          'compose_type': 'nightly',

+                          'pungi_compose_id': None}

          self.assertEqual(data, expected_json)

  

          db.session.expire_all()
@@ -1351,9 +1357,13 @@ 

  

              self.client.post('/api/1/composes/', data=json.dumps(

                  {'source': {'type': 'raw_config',

-                             'source': 'pungi_cfg#hash'}}))

+                             'source': 'pungi_cfg#hash'},

+                  'label': 'Beta-1.2',

+                  'compose_type': 'production'}))

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

          self.assertEqual(c.source_type, PungiSourceType.RAW_CONFIG)

          self.assertEqual(c.source, 'pungi_cfg#hash')

+         self.assertEqual(c.label, 'Beta-1.2')

+         self.assertEqual(c.compose_type, 'production')

no initial comment

rebased onto ebf7e3d72c53d7ff58bd53ad623afe5c9d4d49e6

4 years ago

Generally looks fine. As far as I can tell, the idea is to let users of raw_config to set some of these values.

It makes sense for label and compose type (since those can not be set from config file), but I don't see the use of respin. The point of respin is to make the compose ID unique for subsequent runs with the same settings. I don't think users should be able to set it.

Also I'm not sure how labels will work. Generally the label contains two version components, so Beta-1.3 is the third attempt to generate Beta 1. I think these labels should also remain unique. However maybe that does not need to be enforced in the tooling, and instead can be left to people running the composes (which I think will still be a rather limited group).

My idea was that people/automation running the composes will keep track of respin ID and possible "label". For the distro composes generated using ODCS, we plan to have jenkins job which would set the respin or label probably.

But you got me thinking that maybe we can really set respin based on the number of composes generated from the same raw_config git repo + commit hash. I'm only not sure about comps files or module defaults - these are referenced by git in pungi configuration and can influence the compose even if generated from the same raw_config git repo + commit hash. Should respin be increased in this case or not?

Scratch that, I've now checked how compose_id is computed and how respin influences that and I will redo the "respin" part of the PR to be computed by ODCS.

rebased onto 6cf2fbdbb519e1da2ca744e53493be84e97a0e65

4 years ago

Can you check it now please?

It stores the generated compose_id in the models.Compose.pungi_compose_id and if new compose is requested and the pungi_compose_id already exists, it increments the respin and tries again.

As side effect, one can now find out ODCS compose from the pungi_compose_id using the API which is also nice.

I think this is better.
:thumbsup:

rebased onto 97178f2f2e91ffd256ea21c58bc0a58507f726b5

4 years ago

rebased onto 65eb5d7b3fbcce8402666a513a6cb4720a9e9fd7

4 years ago

@lsedlar, I think it's ready for real review now. I've also tested that with few composes.

If there's a syntax error in the config, there will be an exception here. Will it be captured and reported properly? Maybe it should be wrapped in a RuntimeError?

This is not a good example of label. Generally they need to follow format MILESTONE-X.Y, with list of milestones defined by productmd.
https://github.com/release-engineering/productmd/blob/master/productmd/composeinfo.py#L77

There's no validation here, which is fine, but for clarity it might be better to use a valid value anyway.

This is all handled in the main try/except block in generate_compose: https://pagure.io/odcs/blob/master/f/server/odcs/server/backend.py#_776, so this should be OK.

I will add ci there. The productmd online documentation does not mention that though.

rebased onto 6525a29

4 years ago

Pull-Request has been merged by lsedlar

4 years ago

I'll update the productmd docs.