From 028b0c83d27127dda9d7bdf62e478d942664a76c Mon Sep 17 00:00:00 2001
From: Jan Kaluza
Date: Mar 16 2020 13:32:54 +0000
Subject: Fix #343: Allow configuring multiple target directories.

ODCS currently always stores the resulting compose in the single
`conf.target_dir` directory. There are at least two use-cases for which it
would make sense to store some composes in different locations:

- The compose contains data which cannot be accessed publicly. This is
  needed by the Fedora releng team to generate internal-only composes with
  packages which cannot be publicly available because of licensing issues.
- The ODCS backend generating the compose runs in a different geo-location
  and uses completely different NFS storage for composes. This might be
  used in the future to allow CI infrastructure to use ODCS and store the
  resulting composes closer to the CI VMs.

This commit allows choosing the resulting target_dir by implementing the
following points:

- A new `conf.extra_target_dirs` option is introduced.
- The `conf.clients` or `conf.admins` options can allow certain
  users/groups to use a particular target dir defined in
  `conf.extra_target_dirs`.
- A new `target_dir` API variable is introduced to choose the target_dir.
  If unset, the default `conf.target_dir` is used.
- The `target_dir` is stored in the DB using the `Compose` model.
- The code to remove expired composes now checks composes in all configured
  target directories and tries to remove them. If the target directory is
  not available on a particular backend, the compose removal is skipped.
  This means that only backends which have access to a particular
  target_dir are able to remove the expired composes stored there.
- The KojiTagCache code is changed to support multiple target directories.

Signed-off-by: Jan Kaluza

---

diff --git a/server/odcs/server/api_utils.py b/server/odcs/server/api_utils.py index 6d93f36..e035a67 100644 --- a/server/odcs/server/api_utils.py +++ b/server/odcs/server/api_utils.py @@ -121,6 +121,12 @@ def raise_if_input_not_allowed(**kwargs): values = _enum_int_to_str_list(COMPOSE_FLAGS, values) elif name == "results": values = _enum_int_to_str_list(COMPOSE_RESULTS, values) + elif name == "target_dirs": + # The default conf.target_dir is always allowed. + if values == conf.target_dir: + continue + inverse_extra_target_dirs = {v: k for k, v in conf.extra_target_dirs.items()} + values = inverse_extra_target_dirs[values] if type(values) == int: values = [values] diff --git a/server/odcs/server/backend.py b/server/odcs/server/backend.py index 073bfa8..ccd77b5 100644 --- a/server/odcs/server/backend.py +++ b/server/odcs/server/backend.py @@ -208,6 +208,10 @@ class RemoveExpiredComposesThread(BackendThread): composes = Compose.composes_to_expire() for compose in composes: + # Check if target_dir is accessible on this backend and if not, skip it. + if not os.path.exists(compose.target_dir): + continue + log.info("%r: Removing compose", compose) if compose.removed_by: state_reason = "Removed by {}.".format(compose.removed_by) @@ -228,8 +232,9 @@ class RemoveExpiredComposesThread(BackendThread): # by ODCS. odcs_paths = [] for dirname in ["latest-odcs-*", "odcs-*"]: - path = os.path.join(conf.target_dir, dirname) - odcs_paths += glob.glob(path) + for target_dir in [conf.target_dir] + list(conf.extra_target_dirs.values()): + path = os.path.join(target_dir, dirname) + odcs_paths += glob.glob(path) # Then try removing them if they are left there by some error.
for path in odcs_paths: @@ -257,8 +262,7 @@ class RemoveExpiredComposesThread(BackendThread): continue # Remove old Koji tag data from Koji tag cache. - koji_tag_cache = KojiTagCache() - koji_tag_cache.remove_old_koji_tag_cache_data() + KojiTagCache.remove_old_koji_tag_cache_data() def create_koji_session(): @@ -678,13 +682,13 @@ def generate_compose_symlink(compose): Generates symlink(s) for compose based on its `compose.pungi_compose_id`. It generates following symlinks pointing to the compose: - - $conf.target_dir/$compose.compose_type/$compose.pungi_compose_id - - $conf.target_dir/$compose.compose_type/latest-$name-version + - $compose.target_dir/$compose.compose_type/$compose.pungi_compose_id + - $compose.target_dir/$compose.compose_type/latest-$name-version If the latest-* symlink exists, it is replaced with new one pointing to the `composes`. """ - symlink_dir = os.path.join(conf.target_dir, compose.compose_type) + symlink_dir = os.path.join(compose.target_dir, compose.compose_type) odcs.server.utils.makedirs(symlink_dir) # Generate the non-latest symlink. @@ -714,7 +718,7 @@ def remove_compose_symlink(compose): if not compose.compose_type or not compose.pungi_compose_id: return - symlink_dir = os.path.join(conf.target_dir, compose.compose_type) + symlink_dir = os.path.join(compose.target_dir, compose.compose_type) symlink = os.path.join(symlink_dir, compose.pungi_compose_id) # Check if latest_symlink points to the same directory as the non-latest @@ -745,7 +749,7 @@ def generate_pungi_compose(compose): """ Generates the compose of KOJI, TAG, or REPO type using the Pungi tool. """ - koji_tag_cache = KojiTagCache() + koji_tag_cache = KojiTagCache(compose) # Resolve the general data in the compose. resolve_compose(compose) @@ -802,7 +806,7 @@ def generate_pungi_compose(compose): # to Pungi COMPOSE_ID. We can use directory with these symlinks to # find out previous old compose. if compose.source_type == PungiSourceType.RAW_CONFIG: - old_compose = os.path.join(conf.target_dir, compose.compose_type) + old_compose = os.path.join(compose.target_dir, compose.compose_type) pungi = Pungi(compose.id, pungi_cfg, koji_event, old_compose) pungi.run(compose) @@ -900,12 +904,13 @@ def generate_compose(compose_id, lost_compose=False): # Be nice to end user and replace paths to logs or other files with URL # accessible to the user. - state_reason = state_reason.replace(conf.target_dir, conf.target_dir_url) + if compose.target_dir == conf.target_dir: + state_reason = state_reason.replace(conf.target_dir, conf.target_dir_url) compose.transition(COMPOSE_STATES["failed"], state_reason) compose = Compose.query.filter(Compose.id == compose_id).one() - koji_tag_cache = KojiTagCache() + koji_tag_cache = KojiTagCache(compose) koji_tag_cache.cleanup_reused(compose) # Commit the session to ensure that database transaction is closed and diff --git a/server/odcs/server/cache.py b/server/odcs/server/cache.py index d2435b7..6b65160 100644 --- a/server/odcs/server/cache.py +++ b/server/odcs/server/cache.py @@ -44,11 +44,12 @@ class KojiTagCache(object): ODCS compose attributes which influences the repodata. 
""" - def __init__(self): - self.cache_dir = os.path.join(conf.target_dir, "koji_tag_cache") + def __init__(self, compose): + self.cache_dir = os.path.join(compose.target_dir, "koji_tag_cache") makedirs(self.cache_dir) - def remove_old_koji_tag_cache_data(self): + @classmethod + def remove_old_koji_tag_cache_data(klass): """ Removes the old unused Koji tag cache directories which has not been used for more than `conf.koji_tag_cache_cleanup_timeout` days. @@ -58,24 +59,29 @@ class KojiTagCache(object): older_than_seconds = conf.koji_tag_cache_cleanup_timeout * 24 * 3600 threshold = time.time() - older_than_seconds - for cached_dir in os.listdir(self.cache_dir): - path = os.path.join(self.cache_dir, cached_dir) - try: - mtime = os.path.getmtime(path) - except OSError: - # Directory might have been removed in meantime by some - # 3rd party process. - log.exception( - "Old koji tag cache directory %s removed while checking " - "Koji cache." % path) + for target_dir in [conf.target_dir] + list(conf.extra_target_dirs.values()): + cache_dir = os.path.join(target_dir, "koji_tag_cache") + if not os.path.exists(cache_dir): continue - if mtime > threshold: - # Koji tag directory is not old enough to be removed. - continue - - log.info("Removing old Koji tag cache data in %s." % path) - shutil.rmtree(path) + for cached_dir in os.listdir(cache_dir): + path = os.path.join(cache_dir, cached_dir) + try: + mtime = os.path.getmtime(path) + except OSError: + # Directory might have been removed in meantime by some + # 3rd party process. + log.exception( + "Old koji tag cache directory %s removed while checking " + "Koji cache." % path) + continue + + if mtime > threshold: + # Koji tag directory is not old enough to be removed. + continue + + log.info("Removing old Koji tag cache data in %s." % path) + shutil.rmtree(path) def cached_compose_dir(self, compose): """ diff --git a/server/odcs/server/config.py b/server/odcs/server/config.py index dc760a5..6d9355f 100644 --- a/server/odcs/server/config.py +++ b/server/odcs/server/config.py @@ -149,6 +149,12 @@ class Config(object): 'type': str, 'default': "http://localhost/odcs", 'desc': 'Public facing URL to target_dir.'}, + 'extra_target_dirs': { + 'type': dict, + 'default': {}, + 'desc': 'Extra target_dirs to optionally store the compose on ' + 'instead of the conf.target_dir. Key is the name ' + 'of volume, value if path to target_dir.'}, 'seconds_to_live': { 'type': int, 'default': 24 * 60 * 60, diff --git a/server/odcs/server/mergerepo.py b/server/odcs/server/mergerepo.py index 8caf332..eb4fe4d 100644 --- a/server/odcs/server/mergerepo.py +++ b/server/odcs/server/mergerepo.py @@ -145,7 +145,7 @@ class MergeRepo(object): # Generate the pulp_repo_cache structure and locks for each repo. 
for repo in repos: repo_path = os.path.join( - conf.target_dir, "pulp_repo_cache", repo.replace(repo_prefix, "")) + self.compose.target_dir, "pulp_repo_cache", repo.replace(repo_prefix, "")) repo_paths.append(repo_path) makedirs(repo_path) @@ -169,7 +169,7 @@ class MergeRepo(object): args = [mergerepo_exe, "--method", "nvr", "-o", result_repo_dir] - args += ["--repo-prefix-search", os.path.join(conf.target_dir, "pulp_repo_cache")] + args += ["--repo-prefix-search", os.path.join(self.compose.target_dir, "pulp_repo_cache")] args += ["--repo-prefix-replace", repo_prefix] for repo in repo_paths: args.append("-r") diff --git a/server/odcs/server/migrations/versions/812f2745248f_.py b/server/odcs/server/migrations/versions/812f2745248f_.py new file mode 100644 index 0000000..b4cf225 --- /dev/null +++ b/server/odcs/server/migrations/versions/812f2745248f_.py @@ -0,0 +1,23 @@ +"""Add target_dir column. + +Revision ID: 812f2745248f +Revises: a855c39e2a0f +Create Date: 2020-03-13 13:28:24.381076 + +""" + +# revision identifiers, used by Alembic. +revision = '812f2745248f' +down_revision = 'a855c39e2a0f' + +from alembic import op +import sqlalchemy as sa +from odcs.server import conf + + +def upgrade(): + op.add_column('composes', sa.Column('target_dir', sa.String(), nullable=True, default=conf.target_dir)) + + +def downgrade(): + op.drop_column('composes', 'target_dir') diff --git a/server/odcs/server/models.py b/server/odcs/server/models.py index 42202a6..3277f04 100644 --- a/server/odcs/server/models.py +++ b/server/odcs/server/models.py @@ -159,6 +159,9 @@ class Compose(ODCSBase): pungi_config_dump = db.Column(db.String, nullable=True) # UUID of the celery task. celery_task_id = db.Column(db.String, nullable=True) + # Target directory in which the compose is stored. This is `conf.target_dir` + # by default. + target_dir = db.Column(db.String) @classmethod def create(cls, session, owner, source_type, source, results, @@ -166,7 +169,7 @@ class Compose(ODCSBase): koji_event=None, arches=None, multilib_arches=None, multilib_method=None, builds=None, lookaside_repos=None, modular_koji_tags=None, module_defaults_url=None, - label=None, compose_type=None): + label=None, compose_type=None, target_dir=None): now = datetime.utcnow() compose = cls( owner=owner, @@ -189,6 +192,7 @@ class Compose(ODCSBase): module_defaults_url=module_defaults_url, label=label, compose_type=compose_type, + target_dir=target_dir or conf.target_dir, ) session.add(compose) return compose @@ -231,6 +235,7 @@ class Compose(ODCSBase): pungi_compose_id=None, # Also reset celery task_id celery_task_id=None, + target_dir=compose.target_dir, ) session.add(compose) return compose @@ -254,7 +259,7 @@ class Compose(ODCSBase): # a race between we start Pungi and when Pungi generates that dir, # so just use `glob` to find out the rigth directory. glob_str = os.path.join( - conf.target_dir, "odcs-%d-1-*.n.0" % self.id) + self.target_dir, "odcs-%d-1-*.n.0" % self.id) toplevel_dirs = glob.glob(glob_str) if toplevel_dirs: return toplevel_dirs[0] @@ -266,7 +271,7 @@ class Compose(ODCSBase): toplevel_dir = self.toplevel_work_dir if toplevel_dir: return toplevel_dir - return os.path.join(conf.target_dir, self.latest_dir) + return os.path.join(self.target_dir, self.latest_dir) @property def result_repo_dir(self): @@ -281,6 +286,9 @@ class Compose(ODCSBase): """ Returns public URL to compose directory with per-arch repositories. 
""" + if self.target_dir != conf.target_dir: + return "" + target_dir_url = conf.target_dir_url return target_dir_url + "/" \ @@ -299,6 +307,9 @@ class Compose(ODCSBase): """ Returns public URL to repofile. """ + if self.target_dir != conf.target_dir: + return "" + target_dir_url = conf.target_dir_url return target_dir_url + "/" \ + os.path.join(self.latest_dir, "compose", "Temporary", diff --git a/server/odcs/server/pungi.py b/server/odcs/server/pungi.py index 6231719..368b6a7 100644 --- a/server/odcs/server/pungi.py +++ b/server/odcs/server/pungi.py @@ -403,9 +403,9 @@ class Pungi(object): try: td = tempfile.mkdtemp() self._write_cfgs(td) - compose_dir = self._prepare_compose_dir(compose, td, conf.target_dir) + compose_dir = self._prepare_compose_dir(compose, td, compose.target_dir) self.pungi_cfg.validate(td, compose_dir) - pungi_cmd = self.get_pungi_cmd(td, conf.target_dir, compose_dir) + pungi_cmd = self.get_pungi_cmd(td, compose.target_dir, compose_dir) # Commit the session to ensure that all the `compose` changes are # stored database before executing the compose and are not just @@ -511,8 +511,9 @@ class PungiLogs(object): continue errors += error - errors = errors.replace( - conf.target_dir, conf.target_dir_url) + if self.compose.target_dir == conf.target_dir: + errors = errors.replace( + conf.target_dir, conf.target_dir_url) return errors def get_config_dump(self): diff --git a/server/odcs/server/views.py b/server/odcs/server/views.py index e80417e..f471b7c 100644 --- a/server/odcs/server/views.py +++ b/server/odcs/server/views.py @@ -414,9 +414,18 @@ class ODCSAPI(MethodView): label = data.get("label", None) compose_type = data.get("compose_type", "test") + target_dir = data.get("target_dir") + if target_dir: + if target_dir not in conf.extra_target_dirs: + raise ValueError('Unknown "target_dir" "%s"' % target_dir) + target_dir = conf.extra_target_dirs[target_dir] + else: + target_dir = conf.target_dir + raise_if_input_not_allowed( source_types=source_type, sources=source, results=results, - flags=flags, arches=arches, compose_types=compose_type) + flags=flags, arches=arches, compose_types=compose_type, + target_dirs=target_dir) compose = Compose.create( db.session, self._get_compose_owner(), source_type, source, @@ -429,7 +438,8 @@ class ODCSAPI(MethodView): modular_koji_tags=modular_koji_tags, module_defaults_url=module_defaults, label=label, - compose_type=compose_type) + compose_type=compose_type, + target_dir=target_dir) db.session.add(compose) # Flush is needed, because we use `before_commit` SQLAlchemy event to # send message and before_commit can be called before flush and diff --git a/server/tests/test_cache.py b/server/tests/test_cache.py index eecee77..188452e 100644 --- a/server/tests/test_cache.py +++ b/server/tests/test_cache.py @@ -22,9 +22,10 @@ import time import os -from mock import patch +from mock import patch, MagicMock, call from .utils import ModelsBaseTest +import odcs.server from odcs.server import db, conf from odcs.server.cache import KojiTagCache from odcs.server.models import Compose @@ -36,7 +37,9 @@ class TestKojiTagCache(ModelsBaseTest): def setUp(self): super(TestKojiTagCache, self).setUp() - self.cache = KojiTagCache() + compose = MagicMock() + compose.target_dir = conf.target_dir + self.cache = KojiTagCache(compose) def test_cached_compose_dir(self): c = Compose.create( @@ -142,22 +145,30 @@ class TestKojiTagCache(ModelsBaseTest): @patch("os.listdir") @patch("os.path.getmtime") @patch("shutil.rmtree") - def 
test_remove_old_koji_tag_cache_data(self, rmtree, getmtime, listdir): + @patch("os.path.exists") + @patch.object(odcs.server.config.Config, 'extra_target_dirs', + new={"releng-private": "/tmp/private"}) + def test_remove_old_koji_tag_cache_data(self, exists, rmtree, getmtime, listdir): + exists.return_value = True now = time.time() - listdir.return_value = ["foo", "bar"] + listdir.side_effect = [["foo", "bar"], ["foo", "bar"]] # Default timeout is 30 days, so set 31 for first dir and 29 for # second dir. - getmtime.side_effect = [now - 31 * 24 * 3600, now - 29 * 24 * 3600] + getmtime.side_effect = [now - 31 * 24 * 3600, now - 29 * 24 * 3600] * 2 self.cache.remove_old_koji_tag_cache_data() - rmtree.assert_called_once_with( - os.path.join(self.cache.cache_dir, "foo")) + self.assertEqual( + rmtree.call_args_list, + [call(os.path.join(self.cache.cache_dir, "foo")), + call("/tmp/private/koji_tag_cache/foo")]) @patch("os.listdir") @patch("os.path.getmtime") @patch("shutil.rmtree") + @patch("os.path.exists") def test_remove_old_koji_tag_cache_data_getmtime_raises( - self, rmtree, getmtime, listdir): + self, exists, rmtree, getmtime, listdir): + exists.return_value = True listdir.return_value = ["foo", "bar"] getmtime.side_effect = OSError("path does not exist") diff --git a/server/tests/test_pungi.py b/server/tests/test_pungi.py index 77b06da..24ad7e3 100644 --- a/server/tests/test_pungi.py +++ b/server/tests/test_pungi.py @@ -371,6 +371,7 @@ class TestPungi(ModelsBaseTest): self.ci_dump = self.patch_ci_dump.start() self.compose = MagicMock() + self.compose.target_dir = conf.target_dir self.compose.compose_type = "test" def tearDown(self): diff --git a/server/tests/test_remove_expired_composes_thread.py b/server/tests/test_remove_expired_composes_thread.py index 59621c7..3c21e1a 100644 --- a/server/tests/test_remove_expired_composes_thread.py +++ b/server/tests/test_remove_expired_composes_thread.py @@ -20,6 +20,7 @@ # # Written by Jan Kaluza +import odcs.server from odcs.server import db, conf from odcs.server.models import Compose from odcs.common.types import COMPOSE_STATES, COMPOSE_RESULTS @@ -145,6 +146,22 @@ class TestRemoveExpiredComposesThread(ModelsBaseTest): @patch("os.path.isdir") @patch("glob.glob") @patch("odcs.server.backend.RemoveExpiredComposesThread._remove_compose_dir") + @patch.object(odcs.server.config.Config, 'extra_target_dirs', + new={"releng-private": "/tmp/private"}) + def test_remove_left_composes_extra_target_dir(self, remove_compose_dir, glob, isdir): + isdir.return_value = True + self.thread.do_work() + print(glob.call_args_list) + self.assertEqual( + glob.call_args_list, + [mock.call(os.path.join(conf.target_dir, "latest-odcs-*")), + mock.call("/tmp/private/latest-odcs-*"), + mock.call(os.path.join(conf.target_dir, "odcs-*")), + mock.call("/tmp/private/odcs-*")]) + + @patch("os.path.isdir") + @patch("glob.glob") + @patch("odcs.server.backend.RemoveExpiredComposesThread._remove_compose_dir") def test_remove_left_composes_not_dir( self, remove_compose_dir, glob, isdir): isdir.return_value = False diff --git a/server/tests/test_views.py b/server/tests/test_views.py index 6000f0e..4f77d7b 100644 --- a/server/tests/test_views.py +++ b/server/tests/test_views.py @@ -119,7 +119,8 @@ class ViewBaseTest(ModelsBaseTest): 'compose_types': ["test", "nightly"] }, 'dev3': { - 'source_types': ['tag'] + 'source_types': ['tag'], + 'target_dirs': ["releng-private"] } } } @@ -534,6 +535,58 @@ class TestViews(ViewBaseTest): c = db.session.query(Compose).filter(Compose.id == 1).one() 
self.assertEqual(c.state, COMPOSE_STATES["wait"]) + def test_submit_build_target_dir_unknown(self): + with self.test_request_context(user='dev'): + flask.g.oidc_scopes = [ + '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose') + ] + + rv = self.client.post('/api/1/composes/', data=json.dumps( + {'source': {'type': 'tag', 'source': 'f26'}, + 'target_dir': 'foo'})) + data = json.loads(rv.get_data(as_text=True)) + + self.assertEqual(data['status'], 400) + self.assertEqual(data['error'], 'Bad Request') + self.assertEqual(data['message'], 'Unknown "target_dir" "foo"') + + @patch.object(odcs.server.config.Config, 'extra_target_dirs', + new={"releng-private": "/tmp/private"}) + def test_submit_build_target_not_allowed(self): + with self.test_request_context(user='dev'): + flask.g.oidc_scopes = [ + '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose') + ] + + rv = self.client.post('/api/1/composes/', data=json.dumps( + {'source': {'type': 'tag', 'source': 'f26'}, + 'target_dir': 'releng-private'})) + data = json.loads(rv.get_data(as_text=True)) + + self.assertEqual(data['status'], 403) + self.assertEqual(data['error'], 'Forbidden') + self.assertEqual( + data['message'], + 'User dev not allowed to operate with compose with target_dirs=releng-private.') + + @patch.object(odcs.server.config.Config, 'extra_target_dirs', + new={"releng-private": "/tmp/private"}) + def test_submit_build_target_dir(self): + with self.test_request_context(user='dev3'): + flask.g.oidc_scopes = [ + '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose') + ] + + rv = self.client.post('/api/1/composes/', data=json.dumps( + {'source': {'type': 'tag', 'source': 'f26'}, + 'target_dir': 'releng-private'})) + self.assertEqual(rv.status, '200 OK') + + db.session.expire_all() + c = db.session.query(Compose).filter(Compose.id == 3).one() + self.assertEqual(c.state, COMPOSE_STATES["wait"]) + self.assertEqual(c.target_dir, "/tmp/private") + def test_submit_build_module_defaults_url(self): with self.test_request_context(user='dev'): flask.g.oidc_scopes = [
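
Usage note (editorial addition, not part of the patch): a minimal sketch of how the new `conf.extra_target_dirs` option and the `target_dir` API field described in the commit message fit together. The volume name "releng-private", its path and the per-user rule shape are taken from the tests above; the server URL and token are made-up placeholders.

# Server-side configuration (illustrative): map a volume name to a path and
# allow a client to use it.  API clients always refer to the volume *name*,
# never to the filesystem path.
#
#     extra_target_dirs = {"releng-private": "/tmp/private"}
#     # per-user rule, as in the test suite:
#     "dev3": {"source_types": ["tag"], "target_dirs": ["releng-private"]}

# Client-side request (illustrative): omit "target_dir" to store the compose
# in the default conf.target_dir.
import requests

response = requests.post(
    "https://odcs.example.com/api/1/composes/",        # placeholder URL
    headers={"Authorization": "Bearer <OIDC token>"},   # placeholder auth
    json={
        "source": {"type": "tag", "source": "f26"},
        "target_dir": "releng-private",
    },
)
print(response.json())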