#324 Store pungi_config_dump in database.
Merged 4 years ago by lsedlar. Opened 4 years ago by jkaluza.
jkaluza/odcs store-config-dump into master

file modified
+5
@@ -121,6 +121,11 @@ 

  *packages* - ``(white-space separated list of strings or null)``

      List of names of RPMs (packages) which should appear in the compose. The list of packages to choose from is defined by the content of Koji builds defined in ``builds``. If ``null``, all packages from ``builds`` will be included in a compose.

  

+ .. _pungi_config_dump:

+ 

+ *pungi_config_dump* - ``(string)``

+     Full dump of the Pungi configuration used to generate the compose. It is stored only when ``compose_type`` is set to ``production``. This field appears in API responses only when a single compose is returned.

+ 

  .. _removed_by:

  

  *removed_by* - ``(string)``

@@ -809,6 +809,14 @@ 

      # Generate symlinks pointing to latest build of raw_config compose.

      if compose.source_type == PungiSourceType.RAW_CONFIG:

          generate_compose_symlink(compose)

+         if compose.compose_type == "production":

+             pungi_logs = PungiLogs(compose)

+             config_dump = pungi_logs.get_config_dump()

+             if not config_dump:

+                 msg = "%r: Cannot load Pungi config dump." % compose

+                 log.error(msg)

+                 raise RuntimeError(msg)

+             compose.pungi_config_dump = config_dump

  

      # If there is no exception generated by the pungi.run() and if

      # validation didn't fail, then we know the compose has been

@@ -0,0 +1,22 @@ 

+ """Add pungi_config_dump column.

+ 

+ Revision ID: 82172e6a3154

+ Revises: cd0781bbdab1

+ Create Date: 2020-02-05 13:20:59.014127

+ 

+ """

+ 

+ # revision identifiers, used by Alembic.

+ revision = '82172e6a3154'

+ down_revision = 'cd0781bbdab1'

+ 

+ from alembic import op

+ import sqlalchemy as sa

+ 

+ 

+ def upgrade():

+     op.add_column('composes', sa.Column('pungi_config_dump', sa.String(), nullable=True))

+ 

+ 

+ def downgrade():

+     op.drop_column('composes', 'pungi_config_dump')

file modified
+10 -2
@@ -155,6 +155,8 @@ 

      compose_type = db.Column(db.String, nullable=True)

      # Compose id as generated by Pungi for its ComposeInfo metadata.

      pungi_compose_id = db.Column(db.String, nullable=True)

+     # Full Pungi configuration dump, used only for raw_config source type.

+     pungi_config_dump = db.Column(db.String, nullable=True)

  

      @classmethod

      def create(cls, session, owner, source_type, source, results,
@@ -221,6 +223,7 @@ 

              module_defaults_url=compose.module_defaults_url,

              label=compose.label,

              compose_type=compose.compose_type,

+             pungi_config_dump=compose.pungi_config_dump,

              # Set pungi_compose_id to None, because it is regenerated once

              # this copied Compose is started.

              pungi_compose_id=None,
@@ -305,7 +308,7 @@ 

              return COMPOSE_STATES[field]

          raise ValueError("%s: %s, not in %r" % (key, field, COMPOSE_STATES))

  

-     def json(self):

+     def json(self, full=False):

          flags = []

          for name, value in COMPOSE_FLAGS.items():

              if value == 0:
@@ -320,7 +323,7 @@ 

              if self.results & value:

                  results.append(name)

  

-         return {

+         ret = {

              'id': self.id,

              'owner': self.owner,

              'source_type': self.source_type,
@@ -354,6 +357,11 @@ 

              'pungi_compose_id': self.pungi_compose_id,

          }

  

+         if full:

+             ret["pungi_config_dump"] = self.pungi_config_dump

+ 

+         return ret

+ 

      @staticmethod

      def _utc_datetime_to_iso(datetime_object):

          """

@@ -62,6 +62,7 @@ 

          """Validate configuration. Raises an exception of error found."""

          pass

  

+ 

  class RawPungiConfig(BasePungiConfig):

  

      def __init__(self, compose_source):
@@ -476,6 +477,17 @@ 

          return os.path.join(

              toplevel_work_dir, "logs", "global", "pungi.global.log")

  

+     @property

+     def config_dump_path(self):

+         """

+         Returns path to Pungi config dump.

+         """

+         toplevel_work_dir = self.compose.toplevel_work_dir

+         if not toplevel_work_dir:

+             return None

+         return os.path.join(

+             toplevel_work_dir, "logs", "global", "config-dump.global.log")

+ 

      def _get_global_log_errors(self):

          """

          Helper method which opens the `self.global_log_path` and search for
@@ -524,3 +536,11 @@ 

          errors = errors.replace(

              conf.target_dir, conf.target_dir_url)

          return errors

+ 

+     def get_config_dump(self):

+         config_dump_path = self.config_dump_path

+         if not config_dump_path:

+             return None

+ 

+         with open(config_dump_path, "r") as config_dump:

+             return config_dump.read()

file modified
+1 -1
@@ -151,7 +151,7 @@ 

          else:

              compose = Compose.query.filter_by(id=id).first()

              if compose:

-                 return jsonify(compose.json()), 200

+                 return jsonify(compose.json(True)), 200

              else:

                  raise NotFound('No such compose found.')

  

file modified
+9 -6
@@ -992,11 +992,13 @@ 

      @patch("odcs.server.utils.makedirs")

      @patch("os.symlink")

      @patch("os.unlink")

-     def test_generate_pungi_compose_raw_config(self, unlink, symlink, makedirs):

+     @patch("odcs.server.pungi.PungiLogs.get_config_dump")

+     def test_generate_pungi_compose_raw_config(self, config_dump, unlink, symlink, makedirs):

+         config_dump.return_value = "fake\npungi\nconf\n"

          c = Compose.create(

              db.session, "me", PungiSourceType.RAW_CONFIG, "pungi_cfg#hash",

              COMPOSE_RESULTS["repository"], 60)

-         c.compose_type = "nightly"

+         c.compose_type = "production"

          c.pungi_compose_id = "compose-1-10-2020110.n.0"

          c.id = 1

  
@@ -1009,21 +1011,22 @@ 

          with patch.object(conf, 'raw_config_urls', new=fake_raw_config_urls):

              generate_pungi_compose(c)

  

+         self.assertEqual(c.pungi_config_dump, "fake\npungi\nconf\n")

          self.assertEqual(self.pungi_config.pungi_cfg, {

              'url': 'git://localhost/test.git',

              'config_filename': 'pungi.conf',

              'commit': 'hash'

          })

  

-         makedirs.assert_called_once_with(AnyStringWith("/test_composes/nightly"))

+         makedirs.assert_called_once_with(AnyStringWith("/test_composes/production"))

          symlink.assert_has_calls([

              call('../odcs-1-2018-1',

-                  AnyStringWith('/test_composes/nightly/compose-1-10-2020110.n.0')),

+                  AnyStringWith('/test_composes/production/compose-1-10-2020110.n.0')),

              call('../odcs-1-2018-1',

-                  AnyStringWith('/test_composes/nightly/latest-compose-1')),

+                  AnyStringWith('/test_composes/production/latest-compose-1')),

          ])

          unlink.assert_called_with(

-             AnyStringWith('/test_composes/nightly/latest-compose-1'))

+             AnyStringWith('/test_composes/production/latest-compose-1'))

  

  

  class TestValidatePungiCompose(ModelsBaseTest):

file modified
+4 -2
@@ -44,6 +44,7 @@ 

          db.session.expire_all()

  

          c = db.session.query(Compose).filter(compose.id == 1).one()

+         c.pungi_config_dump = "test"

          self.assertEqual(c.owner, "me")

          self.assertEqual(c.source_type, PungiSourceType.MODULE)

          self.assertEqual(c.source, "testmodule-master")
@@ -77,8 +78,9 @@ 

                           'module_defaults_url': None,

                           'label': None,

                           'compose_type': None,

-                          'pungi_compose_id': None}

-         self.assertEqual(c.json(), expected_json)

+                          'pungi_compose_id': None,

+                          'pungi_config_dump': 'test'}

+         self.assertEqual(c.json(True), expected_json)

  

      def test_create_copy(self):

          """

@@ -575,3 +575,14 @@ 

          pungi_logs = PungiLogs(self.compose)

          errors = pungi_logs.get_error_string()

          self.assertEqual(errors, "")

+ 

+     @patch("odcs.server.pungi.open", create=True)

+     def test_config_dump(self, patched_open):

+         patched_open.return_value = mock_open(

+             read_data="fake\npungi\nconf\n").return_value

+ 

+         pungi_logs = PungiLogs(self.compose)

+         ret = pungi_logs.get_config_dump()

+         self.assertEqual(ret, "fake\npungi\nconf\n")

+ 

+         patched_open.assert_called_once_with(AnyStringWith("logs/global/config-dump.global.log"), "r")

@@ -811,6 +811,7 @@ 

          data = json.loads(resp.get_data(as_text=True))

          self.assertEqual(data['id'], 1)

          self.assertEqual(data['source'], "testmodule:master")

+         self.assertEqual(data['pungi_config_dump'], None)

  

      def test_query_composes(self):

          resp = self.client.get('/api/1/composes/')
@@ -822,6 +823,7 @@ 

          evs = json.loads(resp.get_data(as_text=True))['items']

          self.assertEqual(len(evs), 1)

          self.assertEqual(evs[0]['source'], 'f26')

+         self.assertTrue("pungi_config_dump" not in evs[0])

  

      def test_query_compose_state_done(self):

          resp = self.client.get(

To be able to rebuild production composes in the future, the full
Pungi configuration dump needs to be stored in the ODCS database.

This commit does the following to implement it:

  • A new pungi_config_dump column is added to the database.
  • The API returns pungi_config_dump, but only when a single ODCS
    compose is returned. When multiple composes are queried, this
    field is omitted to keep the response small (see the example
    sketch after this list).
  • The pungi_config_dump is also not sent in UMB or Fedora messages.
  • The pungi_config_dump is stored only for production composes.
  • If the config dump cannot be found in the generated compose,
    the compose is marked as failed. This means every production
    compose must have the config dump stored in the database.
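
For illustration, a minimal client-side sketch of how the new field could
be consumed, assuming a reachable ODCS instance and the /api/1/composes/
routes exercised by the tests above; the server URL, compose id, and output
file name are hypothetical and not part of this PR:

    # Illustrative sketch only: reading the stored Pungi config dump
    # through the ODCS REST API.
    import requests

    ODCS_URL = "https://odcs.example.com"  # hypothetical ODCS instance

    # Single-compose endpoint: the view calls compose.json(True), so
    # pungi_config_dump is included (None for non-production composes).
    compose = requests.get("%s/api/1/composes/42" % ODCS_URL).json()
    config_dump = compose.get("pungi_config_dump")

    # List endpoint: json() is called without full=True, so the field
    # is omitted to keep the response small.
    composes = requests.get("%s/api/1/composes/" % ODCS_URL).json()
    assert "pungi_config_dump" not in composes["items"][0]

    # The dump is the raw Pungi configuration text; saving it to a file
    # keeps it at hand for a later rebuild of the production compose.
    if config_dump:
        with open("pungi-config-dump.conf", "w") as f:
            f.write(config_dump)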

Pull-Request has been merged by lsedlar 4 years ago