#344 Fix #343: Allow configuring multiple target directories.
Merged 4 years ago by lsedlar. Opened 4 years ago by jkaluza.
jkaluza/odcs volumes  into  master

file modified
+14
@@ -136,11 +136,17 @@ 

  *result_repo* - ``(string)``

      The URL to top directory where per-architecture repositories are stored. Only set for composes which generate such repositories on ODCS server.

  

+     .. note::

+         If ``target_dir`` is set to a non-default value, then the ``result_repo`` might be an empty string, because ODCS might not have enough data to generate the URL.

+ 

  .. _result_repofile:

  

  *result_repofile* - ``(string)``

      The URL to .repo file which points to resulting compose. Only set for composes which generate such single repository.

  

+     .. note::

+         If ``target_dir`` is set to a non-default value, then the ``result_repofile`` might be an empty string, because ODCS might not have enough data to generate the URL.

+ 

  .. _sigkeys:

  

  *sigkeys* - ``(white-space separated list of strings)``
@@ -180,6 +186,14 @@ 

  *state_name* - ``(string)``

      Name of the state the compose is currently in. See ``state`` for more info.

  

+ .. _target_dir:

+ 

+ *target_dir* - ``(string)``

+     Name of the target directory for the compose. No value or the ``default`` value means that default target directory is used. This default target directory is always served using the ODCS Frontend. Other possible values depend on the ODCS server configuration.

+ 

+     .. note::

+         If ``target_dir`` is set to a non-default value, then the ``result_repo`` and ``result_repofile`` might be an empty string, because ODCS might not have enough data to generate the URL.

+ 

  .. _time_done:

  

  *time_done* - ``(datetime)``

@@ -121,6 +121,12 @@ 

                  values = _enum_int_to_str_list(COMPOSE_FLAGS, values)

              elif name == "results":

                  values = _enum_int_to_str_list(COMPOSE_RESULTS, values)

+             elif name == "target_dirs":

+                 # The default conf.target_dir is always allowed.

+                 if values == conf.target_dir:

+                     continue

+                 inverse_extra_target_dirs = {v: k for k, v in conf.extra_target_dirs.items()}

+                 values = inverse_extra_target_dirs[values]

  

              if type(values) == int:

                  values = [values]

file modified
+17 -12
@@ -208,6 +208,10 @@ 

  

          composes = Compose.composes_to_expire()

          for compose in composes:

+             # Check if target_dir is accessible on this backend and if not, skip it.

+             if not os.path.exists(compose.target_dir):

+                 continue

+ 

              log.info("%r: Removing compose", compose)

              if compose.removed_by:

                  state_reason = "Removed by {}.".format(compose.removed_by)
@@ -228,8 +232,9 @@ 

          # by ODCS.

          odcs_paths = []

          for dirname in ["latest-odcs-*", "odcs-*"]:

-             path = os.path.join(conf.target_dir, dirname)

-             odcs_paths += glob.glob(path)

+             for target_dir in [conf.target_dir] + list(conf.extra_target_dirs.values()):

+                 path = os.path.join(target_dir, dirname)

+                 odcs_paths += glob.glob(path)

  

          # Then try removing them if they are left there by some error.

          for path in odcs_paths:
@@ -257,8 +262,7 @@ 

                  continue

  

          # Remove old Koji tag data from Koji tag cache.

-         koji_tag_cache = KojiTagCache()

-         koji_tag_cache.remove_old_koji_tag_cache_data()

+         KojiTagCache.remove_old_koji_tag_cache_data()

  

  

  def create_koji_session():
@@ -678,13 +682,13 @@ 

      Generates symlink(s) for compose based on its `compose.pungi_compose_id`.

  

      It generates following symlinks pointing to the compose:

-       - $conf.target_dir/$compose.compose_type/$compose.pungi_compose_id

-       - $conf.target_dir/$compose.compose_type/latest-$name-version

+       - $compose.target_dir/$compose.compose_type/$compose.pungi_compose_id

+       - $compose.target_dir/$compose.compose_type/latest-$name-version

  

      If the latest-* symlink exists, it is replaced with new one pointing to

      the `composes`.

      """

-     symlink_dir = os.path.join(conf.target_dir, compose.compose_type)

+     symlink_dir = os.path.join(compose.target_dir, compose.compose_type)

      odcs.server.utils.makedirs(symlink_dir)

  

      # Generate the non-latest symlink.
@@ -714,7 +718,7 @@ 

      if not compose.compose_type or not compose.pungi_compose_id:

          return

  

-     symlink_dir = os.path.join(conf.target_dir, compose.compose_type)

+     symlink_dir = os.path.join(compose.target_dir, compose.compose_type)

      symlink = os.path.join(symlink_dir, compose.pungi_compose_id)

  

      # Check if latest_symlink points to the same directory as the non-latest
@@ -745,7 +749,7 @@ 

      """

      Generates the compose of KOJI, TAG, or REPO type using the Pungi tool.

      """

-     koji_tag_cache = KojiTagCache()

+     koji_tag_cache = KojiTagCache(compose)

  

      # Resolve the general data in the compose.

      resolve_compose(compose)
@@ -802,7 +806,7 @@ 

          # to Pungi COMPOSE_ID. We can use directory with these symlinks to

          # find out previous old compose.

          if compose.source_type == PungiSourceType.RAW_CONFIG:

-             old_compose = os.path.join(conf.target_dir, compose.compose_type)

+             old_compose = os.path.join(compose.target_dir, compose.compose_type)

  

          pungi = Pungi(compose.id, pungi_cfg, koji_event, old_compose)

          pungi.run(compose)
@@ -900,12 +904,13 @@ 

  

              # Be nice to end user and replace paths to logs or other files with URL

              # accessible to the user.

-             state_reason = state_reason.replace(conf.target_dir, conf.target_dir_url)

+             if compose.target_dir == conf.target_dir:

+                 state_reason = state_reason.replace(conf.target_dir, conf.target_dir_url)

              compose.transition(COMPOSE_STATES["failed"], state_reason)

  

          compose = Compose.query.filter(Compose.id == compose_id).one()

  

-         koji_tag_cache = KojiTagCache()

+         koji_tag_cache = KojiTagCache(compose)

          koji_tag_cache.cleanup_reused(compose)

  

          # Commit the session to ensure that database transaction is closed and

file modified
+25 -19
@@ -44,11 +44,12 @@ 

      ODCS compose attributes which influences the repodata.

      """

  

-     def __init__(self):

-         self.cache_dir = os.path.join(conf.target_dir, "koji_tag_cache")

+     def __init__(self, compose):

+         self.cache_dir = os.path.join(compose.target_dir, "koji_tag_cache")

          makedirs(self.cache_dir)

  

-     def remove_old_koji_tag_cache_data(self):

+     @classmethod

+     def remove_old_koji_tag_cache_data(klass):

          """

          Removes the old unused Koji tag cache directories which has not been

          used for more than `conf.koji_tag_cache_cleanup_timeout` days.
@@ -58,24 +59,29 @@ 

          older_than_seconds = conf.koji_tag_cache_cleanup_timeout * 24 * 3600

          threshold = time.time() - older_than_seconds

  

-         for cached_dir in os.listdir(self.cache_dir):

-             path = os.path.join(self.cache_dir, cached_dir)

-             try:

-                 mtime = os.path.getmtime(path)

-             except OSError:

-                 # Directory might have been removed in meantime by some

-                 # 3rd party process.

-                 log.exception(

-                     "Old koji tag cache directory %s removed while checking "

-                     "Koji cache." % path)

+         for target_dir in [conf.target_dir] + list(conf.extra_target_dirs.values()):

+             cache_dir = os.path.join(target_dir, "koji_tag_cache")

+             if not os.path.exists(cache_dir):

                  continue

  

-             if mtime > threshold:

-                 # Koji tag directory is not old enough to be removed.

-                 continue

- 

-             log.info("Removing old Koji tag cache data in %s." % path)

-             shutil.rmtree(path)

+             for cached_dir in os.listdir(cache_dir):

+                 path = os.path.join(cache_dir, cached_dir)

+                 try:

+                     mtime = os.path.getmtime(path)

+                 except OSError:

+                     # Directory might have been removed in meantime by some

+                     # 3rd party process.

+                     log.exception(

+                         "Old koji tag cache directory %s removed while checking "

+                         "Koji cache." % path)

+                     continue

+ 

+                 if mtime > threshold:

+                     # Koji tag directory is not old enough to be removed.

+                     continue

+ 

+                 log.info("Removing old Koji tag cache data in %s." % path)

+                 shutil.rmtree(path)

  

      def cached_compose_dir(self, compose):

          """

@@ -149,6 +149,12 @@ 

              'type': str,

              'default': "http://localhost/odcs",

              'desc': 'Public facing URL to target_dir.'},

+         'extra_target_dirs': {

+             'type': dict,

+             'default': {},

+             'desc': 'Extra target_dirs to optionally store the compose on '

+                     'instead of the conf.target_dir. Key is the name '

+                     'of volume, value is path to target_dir.'},

          'seconds_to_live': {

              'type': int,

              'default': 24 * 60 * 60,

@@ -145,7 +145,7 @@ 

          # Generate the pulp_repo_cache structure and locks for each repo.

          for repo in repos:

              repo_path = os.path.join(

-                 conf.target_dir, "pulp_repo_cache", repo.replace(repo_prefix, ""))

+                 self.compose.target_dir, "pulp_repo_cache", repo.replace(repo_prefix, ""))

              repo_paths.append(repo_path)

              makedirs(repo_path)

  
@@ -169,7 +169,7 @@ 

  

              args = [mergerepo_exe, "--method", "nvr", "-o",

                      result_repo_dir]

-             args += ["--repo-prefix-search", os.path.join(conf.target_dir, "pulp_repo_cache")]

+             args += ["--repo-prefix-search", os.path.join(self.compose.target_dir, "pulp_repo_cache")]

              args += ["--repo-prefix-replace", repo_prefix]

              for repo in repo_paths:

                  args.append("-r")

@@ -0,0 +1,23 @@ 

+ """Add target_dir column.

+ 

+ Revision ID: 812f2745248f

+ Revises: a855c39e2a0f

+ Create Date: 2020-03-13 13:28:24.381076

+ 

+ """

+ 

+ # revision identifiers, used by Alembic.

+ revision = '812f2745248f'

+ down_revision = 'a855c39e2a0f'

+ 

+ from alembic import op

+ import sqlalchemy as sa

+ from odcs.server import conf

+ 

+ 

+ def upgrade():

+     op.add_column('composes', sa.Column('target_dir', sa.String(), nullable=True, default=conf.target_dir))

+ 

+ 

+ def downgrade():

+     op.drop_column('composes', 'target_dir')

file modified
+21 -3
@@ -159,6 +159,9 @@ 

      pungi_config_dump = db.Column(db.String, nullable=True)

      # UUID of the celery task.

      celery_task_id = db.Column(db.String, nullable=True)

+     # Target directory in which the compose is stored. This is `conf.target_dir`

+     # by default.

+     target_dir = db.Column(db.String)

  

      @classmethod

      def create(cls, session, owner, source_type, source, results,
@@ -166,7 +169,7 @@ 

                 koji_event=None, arches=None, multilib_arches=None,

                 multilib_method=None, builds=None, lookaside_repos=None,

                 modular_koji_tags=None, module_defaults_url=None,

-                label=None, compose_type=None):

+                label=None, compose_type=None, target_dir=None):

          now = datetime.utcnow()

          compose = cls(

              owner=owner,
@@ -189,6 +192,7 @@ 

              module_defaults_url=module_defaults_url,

              label=label,

              compose_type=compose_type,

+             target_dir=target_dir or conf.target_dir,

          )

          session.add(compose)

          return compose
@@ -231,6 +235,7 @@ 

              pungi_compose_id=None,

              # Also reset celery task_id

              celery_task_id=None,

+             target_dir=compose.target_dir,

          )

          session.add(compose)

          return compose
@@ -254,7 +259,7 @@ 

          # a race between we start Pungi and when Pungi generates that dir,

          # so just use `glob` to find out the rigth directory.

          glob_str = os.path.join(

-             conf.target_dir, "odcs-%d-1-*.n.0" % self.id)

+             self.target_dir, "odcs-%d-1-*.n.0" % self.id)

          toplevel_dirs = glob.glob(glob_str)

          if toplevel_dirs:

              return toplevel_dirs[0]
@@ -266,7 +271,7 @@ 

              toplevel_dir = self.toplevel_work_dir

              if toplevel_dir:

                  return toplevel_dir

-         return os.path.join(conf.target_dir, self.latest_dir)

+         return os.path.join(self.target_dir, self.latest_dir)

  

      @property

      def result_repo_dir(self):
@@ -281,6 +286,9 @@ 

          """

          Returns public URL to compose directory with per-arch repositories.

          """

+         if self.target_dir != conf.target_dir:

+             return ""

+ 

          target_dir_url = conf.target_dir_url

  

          return target_dir_url + "/" \
@@ -299,6 +307,9 @@ 

          """

          Returns public URL to repofile.

          """

+         if self.target_dir != conf.target_dir:

+             return ""

+ 

          target_dir_url = conf.target_dir_url

          return target_dir_url + "/" \

              + os.path.join(self.latest_dir, "compose", "Temporary",
@@ -327,6 +338,12 @@ 

              if self.results & value:

                  results.append(name)

  

+         if self.target_dir == conf.target_dir:

+             target_dir = "default"

+         else:

+             inverse_extra_target_dirs = {v: k for k, v in conf.extra_target_dirs.items()}

+             target_dir = inverse_extra_target_dirs.get(self.target_dir, "unknown")

+ 

          ret = {

              'id': self.id,

              'owner': self.owner,
@@ -359,6 +376,7 @@ 

              'label': self.label,

              'compose_type': self.compose_type,

              'pungi_compose_id': self.pungi_compose_id,

+             'target_dir': target_dir,

          }

  

          if full:

file modified
+5 -4
@@ -403,9 +403,9 @@ 

          try:

              td = tempfile.mkdtemp()

              self._write_cfgs(td)

-             compose_dir = self._prepare_compose_dir(compose, td, conf.target_dir)

+             compose_dir = self._prepare_compose_dir(compose, td, compose.target_dir)

              self.pungi_cfg.validate(td, compose_dir)

-             pungi_cmd = self.get_pungi_cmd(td, conf.target_dir, compose_dir)

+             pungi_cmd = self.get_pungi_cmd(td, compose.target_dir, compose_dir)

  

              # Commit the session to ensure that all the `compose` changes are

              # stored database before executing the compose and are not just
@@ -511,8 +511,9 @@ 

                  continue

              errors += error

  

-         errors = errors.replace(

-             conf.target_dir, conf.target_dir_url)

+         if self.compose.target_dir == conf.target_dir:

+             errors = errors.replace(

+                 conf.target_dir, conf.target_dir_url)

          return errors

  

      def get_config_dump(self):

file modified
+13 -2
@@ -254,6 +254,7 @@ 

          :jsonparam list lookaside_repos: List of :ref:`lookaside_repos<lookaside_repos>`.

          :jsonparam string label: String defining the :ref:`label<label>`.

          :jsonparam string compose_type: String defining the :ref:`compose_type<compose_type>`.

+         :jsonparam string target_dir: String defining the :ref:`target_dir<target_dir>`.

          :jsonparam object source: The JSON object defining the source of compose.

          :jsonparam string source["type"]: String defining the :ref:`source type<source_type>`.

          :jsonparam string source["source"]: String defining the :ref:`source<source>`.
@@ -414,9 +415,18 @@ 

          label = data.get("label", None)

          compose_type = data.get("compose_type", "test")

  

+         target_dir = data.get("target_dir")

+         if target_dir and target_dir != "default":

+             if target_dir not in conf.extra_target_dirs:

+                 raise ValueError('Unknown "target_dir" "%s"' % target_dir)

+             target_dir = conf.extra_target_dirs[target_dir]

+         else:

+             target_dir = conf.target_dir

+ 

          raise_if_input_not_allowed(

              source_types=source_type, sources=source, results=results,

-             flags=flags, arches=arches, compose_types=compose_type)

+             flags=flags, arches=arches, compose_types=compose_type,

+             target_dirs=target_dir)

  

          compose = Compose.create(

              db.session, self._get_compose_owner(), source_type, source,
@@ -429,7 +439,8 @@ 

              modular_koji_tags=modular_koji_tags,

              module_defaults_url=module_defaults,

              label=label,

-             compose_type=compose_type)

+             compose_type=compose_type,

+             target_dir=target_dir)

          db.session.add(compose)

          # Flush is needed, because we use `before_commit` SQLAlchemy event to

          # send message and before_commit can be called before flush and

file modified
+19 -8
@@ -22,9 +22,10 @@ 

  

  import time

  import os

- from mock import patch

+ from mock import patch, MagicMock, call

  

  from .utils import ModelsBaseTest

+ import odcs.server

  from odcs.server import db, conf

  from odcs.server.cache import KojiTagCache

  from odcs.server.models import Compose
@@ -36,7 +37,9 @@ 

  

      def setUp(self):

          super(TestKojiTagCache, self).setUp()

-         self.cache = KojiTagCache()

+         compose = MagicMock()

+         compose.target_dir = conf.target_dir

+         self.cache = KojiTagCache(compose)

  

      def test_cached_compose_dir(self):

          c = Compose.create(
@@ -142,22 +145,30 @@ 

      @patch("os.listdir")

      @patch("os.path.getmtime")

      @patch("shutil.rmtree")

-     def test_remove_old_koji_tag_cache_data(self, rmtree, getmtime, listdir):

+     @patch("os.path.exists")

+     @patch.object(odcs.server.config.Config, 'extra_target_dirs',

+                   new={"releng-private": "/tmp/private"})

+     def test_remove_old_koji_tag_cache_data(self, exists, rmtree, getmtime, listdir):

+         exists.return_value = True

          now = time.time()

-         listdir.return_value = ["foo", "bar"]

+         listdir.side_effect = [["foo", "bar"], ["foo", "bar"]]

          # Default timeout is 30 days, so set 31 for first dir and 29 for

          # second dir.

-         getmtime.side_effect = [now - 31 * 24 * 3600, now - 29 * 24 * 3600]

+         getmtime.side_effect = [now - 31 * 24 * 3600, now - 29 * 24 * 3600] * 2

  

          self.cache.remove_old_koji_tag_cache_data()

-         rmtree.assert_called_once_with(

-             os.path.join(self.cache.cache_dir, "foo"))

+         self.assertEqual(

+             rmtree.call_args_list,

+             [call(os.path.join(self.cache.cache_dir, "foo")),

+              call("/tmp/private/koji_tag_cache/foo")])

  

      @patch("os.listdir")

      @patch("os.path.getmtime")

      @patch("shutil.rmtree")

+     @patch("os.path.exists")

      def test_remove_old_koji_tag_cache_data_getmtime_raises(

-             self, rmtree, getmtime, listdir):

+             self, exists, rmtree, getmtime, listdir):

+         exists.return_value = True

          listdir.return_value = ["foo", "bar"]

          getmtime.side_effect = OSError("path does not exist")

  

file modified
+2 -1
@@ -79,7 +79,8 @@ 

                           'label': None,

                           'compose_type': None,

                           'pungi_compose_id': None,

-                          'pungi_config_dump': 'test'}

+                          'pungi_config_dump': 'test',

+                          'target_dir': 'default'}

          self.assertEqual(c.json(True), expected_json)

  

      def test_create_copy(self):

@@ -371,6 +371,7 @@ 

          self.ci_dump = self.patch_ci_dump.start()

  

          self.compose = MagicMock()

+         self.compose.target_dir = conf.target_dir

          self.compose.compose_type = "test"

  

      def tearDown(self):

@@ -20,6 +20,7 @@ 

  #

  # Written by Jan Kaluza <jkaluza@redhat.com>

  

+ import odcs.server

  from odcs.server import db, conf

  from odcs.server.models import Compose

  from odcs.common.types import COMPOSE_STATES, COMPOSE_RESULTS
@@ -145,6 +146,22 @@ 

      @patch("os.path.isdir")

      @patch("glob.glob")

      @patch("odcs.server.backend.RemoveExpiredComposesThread._remove_compose_dir")

+     @patch.object(odcs.server.config.Config, 'extra_target_dirs',

+                   new={"releng-private": "/tmp/private"})

+     def test_remove_left_composes_extra_target_dir(self, remove_compose_dir, glob, isdir):

+         isdir.return_value = True

+         self.thread.do_work()

+         print(glob.call_args_list)

+         self.assertEqual(

+             glob.call_args_list,

+             [mock.call(os.path.join(conf.target_dir, "latest-odcs-*")),

+              mock.call("/tmp/private/latest-odcs-*"),

+              mock.call(os.path.join(conf.target_dir, "odcs-*")),

+              mock.call("/tmp/private/odcs-*")])

+ 

+     @patch("os.path.isdir")

+     @patch("glob.glob")

+     @patch("odcs.server.backend.RemoveExpiredComposesThread._remove_compose_dir")

      def test_remove_left_composes_not_dir(

              self, remove_compose_dir, glob, isdir):

          isdir.return_value = False

file modified
+61 -3
@@ -119,7 +119,8 @@ 

                      'compose_types': ["test", "nightly"]

                  },

                  'dev3': {

-                     'source_types': ['tag']

+                     'source_types': ['tag'],

+                     'target_dirs': ["releng-private"]

                  }

              }

          }
@@ -318,7 +319,8 @@ 

                           'module_defaults_url': None,

                           'label': None,

                           'compose_type': 'test',

-                          'pungi_compose_id': None}

+                          'pungi_compose_id': None,

+                          'target_dir': 'default'}

          self.assertEqual(data, expected_json)

  

          db.session.expire_all()
@@ -534,6 +536,61 @@ 

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

  

+     def test_submit_build_target_dir_unknown(self):

+         with self.test_request_context(user='dev'):

+             flask.g.oidc_scopes = [

+                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+             ]

+ 

+             rv = self.client.post('/api/1/composes/', data=json.dumps(

+                 {'source': {'type': 'tag', 'source': 'f26'},

+                  'target_dir': 'foo'}))

+             data = json.loads(rv.get_data(as_text=True))

+ 

+         self.assertEqual(data['status'], 400)

+         self.assertEqual(data['error'], 'Bad Request')

+         self.assertEqual(data['message'], 'Unknown "target_dir" "foo"')

+ 

+     @patch.object(odcs.server.config.Config, 'extra_target_dirs',

+                   new={"releng-private": "/tmp/private"})

+     def test_submit_build_target_not_allowed(self):

+         with self.test_request_context(user='dev'):

+             flask.g.oidc_scopes = [

+                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+             ]

+ 

+             rv = self.client.post('/api/1/composes/', data=json.dumps(

+                 {'source': {'type': 'tag', 'source': 'f26'},

+                  'target_dir': 'releng-private'}))

+             data = json.loads(rv.get_data(as_text=True))

+ 

+         self.assertEqual(data['status'], 403)

+         self.assertEqual(data['error'], 'Forbidden')

+         self.assertEqual(

+             data['message'],

+             'User dev not allowed to operate with compose with target_dirs=releng-private.')

+ 

+     @patch.object(odcs.server.config.Config, 'extra_target_dirs',

+                   new={"releng-private": "/tmp/private"})

+     def test_submit_build_target_dir(self):

+         with self.test_request_context(user='dev3'):

+             flask.g.oidc_scopes = [

+                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+             ]

+ 

+             rv = self.client.post('/api/1/composes/', data=json.dumps(

+                 {'source': {'type': 'tag', 'source': 'f26'},

+                  'target_dir': 'releng-private'}))

+             self.assertEqual(rv.status, '200 OK')

+             data = json.loads(rv.get_data(as_text=True))

+ 

+         self.assertEqual(data['target_dir'], 'releng-private')

+ 

+         db.session.expire_all()

+         c = db.session.query(Compose).filter(Compose.id == 3).one()

+         self.assertEqual(c.state, COMPOSE_STATES["wait"])

+         self.assertEqual(c.target_dir, "/tmp/private")

+ 

      def test_submit_build_module_defaults_url(self):

          with self.test_request_context(user='dev'):

              flask.g.oidc_scopes = [
@@ -1168,7 +1225,8 @@ 

                           'module_defaults_url': None,

                           'label': None,

                           'compose_type': 'test',

-                          'pungi_compose_id': None}

+                          'pungi_compose_id': None,

+                          'target_dir': 'default'}

          self.assertEqual(data, expected_json)

  

          db.session.expire_all()

ODCS currently always store resulting compose in the single
conf.target_dir directory. There are at least two use-cases
for which it would make sense to store some composes in different
locations:

  • The compose contains data which cannot be accessed publicly.
    This is needed by Fedora releng team to generate internal-only
    composes with packages which cannot be publicly available because
    of licensing issues.
  • The ODCS backend generating the compose runs in different geo-location
    and uses completely different NFS storage for composes. This might
    be used in the future to allow CI infrastructure to use ODCS and
    store the resulting composes closer to the CI VMs.

This commit allows choosing the resulting target_dir by implementing
following points:

  • New conf.extra_target_dirs option is introduced.
  • The conf.clients or conf.admins options can allow certain users/groups
    to use particular target dir defined in conf.extra_target_dirs.
  • New target_dir API variable is introduced to choose the target_dir.
    If unset, default conf.target_dir is used.
  • The target_dir is stored in DB using the Compose model.
  • The code to remove expired composes now checks composes in all configured
    target directories and tries to remove them. If the target directory
    is not available on particular backend, the compose removal is skipped.
    This means that only backends which have access to particular target_dir
    are able to remove expired composes.
  • The KojiTagCache code is changed to support multiple target directories.

Signed-off-by: Jan Kaluza jkaluza@redhat.com

I will do documentation in an extra commit once we finish the code review.

rebased onto c688a26a7396e28a038f732b11cbe488677ffbad

4 years ago

Generally the patch looks good.

I'm slightly concerned about the possibility that some composes will not have valid URLs. How will users know which is the compose they asked for? Will they have to construct the path themselves based on the ID?

I'm going to think and work more on the API side of this. But generally, the answer is that composes generated on the extra_target_dir won't be accessible by the default frontend and users will need to determine the path to the compose themselves.

The use-case is to generate composes for particular teams on their own storage to mitigate problems with mirroring the private composes to multiple locations or with accidentally making the private composes publicly accessible.

For example, the OSCI team is generating the composes in BOS and they are later mirrored to RDU from where they further copy them to their own volume. That team does not care about a valid URL to the compose at all. They only use NFS to access the compose. Using this, we could in the future create a backend in RDU which would generate the compose directly on their target storage, so they wouldn't need to wait for mirroring from BOS to RDU and further copy to the permanent location.

Another example is Fedora releng team. They want to generate private-only composes which must not appear in the directory served by the ODCS Frontend.

I admit that the API should show that the compose is not available on the frontend and should also show where it is available. I'm going to work on this part.

Otherwise, I have tested this patch on my testing ODCS instance and it works as expected.

1 new commit added

  • Add documentation and show the target_dir in API response.
4 years ago

Added docs, see second commit.

Ok, I think with the documentation it looks good.

rebased onto 028b0c8

4 years ago

Pull-Request has been merged by lsedlar

4 years ago