From c2baa3355ae1ba1786d372c0b27da75acf70002b Mon Sep 17 00:00:00 2001
From: Josef Skladanka
Date: Feb 02 2021 07:53:14 +0000
Subject: Get rid of the local caching

---

diff --git a/conf/settings.py.example b/conf/settings.py.example
index 0ea0c6b..464f7cc 100644
--- a/conf/settings.py.example
+++ b/conf/settings.py.example
@@ -22,13 +22,11 @@ CELERY_RESULT_BACKEND = 'redis://localhost:6379'
 MAX_DB_AGE = 14400  # Max cache age allowed in seconds (240 minutes)
 
 # IGNORE_CACHE_TIMESTAMP sets how oraculum verifies data aga in cache
-# values can be 'local', 'db' or None
-# "local" - always trust both local and database caches, plan refresh if database data are missing
-# "db" - verify only local cache age, always trust database cache, plan refresh if database data are missing
-# None - verify age of both, local and database caches, plan refresh if data are too old
+# values can be 'db' or None
+# "db" - always trust database cache, plan refresh if database data are missing
+# None - verify age of database caches, plan refresh if data are too old
 IGNORE_CACHE_TIMESTAMP = 'db'
 
-ENABLE_LOCAL_CACHE = False
 
 ACTIVITY_REQUIRED = 14  # Cache data for users who used the service in at least last 14 days
 
diff --git a/oraculum/__init__.py b/oraculum/__init__.py
index 46fe949..1f2cd7f 100644
--- a/oraculum/__init__.py
+++ b/oraculum/__init__.py
@@ -89,10 +89,6 @@ else:
     default_config_obj = 'oraculum.config.ProductionConfig'
     default_config_dir = '/etc/oraculum'
 
-if 'celery' in sys.argv:
-    default_config_obj = 'oraculum.config.CeleryWorkerConfig'
-    default_config_file = 'celery_app_settings.py'
-
 app.config.from_object(default_config_obj)
 
diff --git a/oraculum/config.py b/oraculum/config.py
index 9b0ab9e..1af0ee0 100644
--- a/oraculum/config.py
+++ b/oraculum/config.py
@@ -49,13 +49,10 @@ class Config(object):
     CELERY_BROKER_URL = 'redis://localhost:6379'
     CELERY_RESULT_BACKEND = 'redis://localhost:6379'
 
-    ENABLE_LOCAL_CACHE = False
-
     # IGNORE_CACHE_TIMESTAMP sets how oraculum verifies data aga in cache
-    # values can be 'local', 'db' or None
-    # "local" - always trust both local and database caches, plan refresh if database data are missing
-    # "db" - verify only local cache age, always trust database cache, plan refresh if database data are missing
-    # None - verify age of both, local and database caches, plan refresh if data are too old
+    # values can be 'db' or None
+    # "db" - always trust database cache, plan refresh if database data are missing
+    # None - verify age of database caches, plan refresh if data are too old
     IGNORE_CACHE_TIMESTAMP = 'db'
     MAX_DB_AGE = 14400  # keep data cached for 240 minutes
 
@@ -150,10 +147,6 @@ class TestingConfig(Config):
     TESTING = True
 
 
-class CeleryWorkerConfig(Config):
-    ENABLE_LOCAL_CACHE = False
-
-
 def openshift_config(config_object, openshift_production):
     # First, get db details from env
     try:
diff --git a/oraculum/utils/cache_utils.py b/oraculum/utils/cache_utils.py
index 7279106..aa74f40 100644
--- a/oraculum/utils/cache_utils.py
+++ b/oraculum/utils/cache_utils.py
@@ -40,16 +40,9 @@ class AsyncRefreshInProgress():
     pass
 
 
-class CachedObject():
-    def __init__(self, time_created, data):
-        self.time_created = time_created
-        self.data = data
-
-
 class Cached():
     def __init__(self, max_cache_age):
         self._refreshers = {}
-        self._local_cache = {}
         self._max_cache_age = datetime.timedelta(seconds=max_cache_age)
 
     # getting the values
@@ -71,10 +64,6 @@ class Cached():
     def get_refreshed_time(self, item, *args, **kwargs):
         what = self._construct_what(item, *args, **kwargs)
 
-        local = self._local_cache.get(what)
-        if self._new_enough(local):
-            return local.time_created
-
         from_db = CachedData.query.filter_by(provider=what).first()
         if self._new_enough(from_db):
             return from_db.time_created
@@ -87,15 +76,8 @@ class Cached():
         what = self._construct_what(item, *args, **kwargs)
 
-        if self._new_enough(self._local_cache.get(what)):
-            return self._local_cache[what].data
-
-        #app.logger.debug("local cache cache miss on %s" % what)
         from_db = CachedData.query.filter_by(provider=what).first()
-
         if from_db and self._new_enough(from_db):
-            if app.config['ENABLE_LOCAL_CACHE']:
-                self._local_cache[what] = CachedObject(from_db.time_created, from_db.data)
             return from_db.data
 
         #app.logger.debug("db cache miss on %s" % what)
@@ -118,13 +100,6 @@ class Cached():
         for item in items:
             what = self._construct_what(what_base, item)
-
-            if app.config['ENABLE_LOCAL_CACHE']:
-                cached = self._local_cache.get(what)
-                if self._new_enough(cached):
-                    data[item] = cached.data
-                    continue
-
             whats.add(what)
             what_to_item[what] = item
 
@@ -143,8 +118,6 @@ class Cached():
             item = what_to_item[result.provider]
             data[item] = result.data
             seen.add(item)
-            if app.config['ENABLE_LOCAL_CACHE']:
-                self._local_cache[result.provider] = CachedObject(result.time_created, result.data)
 
         # Schedule refresh for any missing result that we've asked for
         missing = set(items).difference(seen)
@@ -163,8 +136,6 @@ class Cached():
     def _new_enough(self, cached_object):
         if cached_object is None:
             return False
-        if app.config['IGNORE_CACHE_TIMESTAMP'] == "local":
-            return True
         # If cached_object is an instance of CachedData, it is from the database
         if app.config['IGNORE_CACHE_TIMESTAMP'] == "db" and isinstance(cached_object, CachedData):
             return True
@@ -199,8 +170,6 @@ class Cached():
 
         # Store time of sync finish
         watchdog_utils.push_to_watchdog("sync_ended", what)
 
-        if app.config['ENABLE_LOCAL_CACHE']:
-            self._local_cache[what] = CachedObject(row.time_created, data)
 
         return data
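
Note (not part of the patch): with the "local" mode removed, the cache-freshness rule reduces to the two remaining IGNORE_CACHE_TIMESTAMP values. Below is a minimal standalone sketch of that rule, based only on the config comments and the _new_enough hunk above. The name is_new_enough, the row argument, and the UTC-based age comparison are illustrative assumptions, not code copied from the oraculum repository.

    # Illustrative sketch -- assumed names, not part of the diff above.
    import datetime

    MAX_DB_AGE = 14400  # seconds, matching conf/settings.py.example

    def is_new_enough(row, ignore_cache_timestamp='db',
                      max_age=datetime.timedelta(seconds=MAX_DB_AGE)):
        """Decide whether a cached database row can be served without a refresh."""
        if row is None:
            # Nothing cached yet: a refresh has to be planned either way.
            return False
        if ignore_cache_timestamp == 'db':
            # Trust the database cache regardless of its age.
            return True
        # IGNORE_CACHE_TIMESTAMP is None: serve only rows younger than MAX_DB_AGE.
        # row.time_created is assumed here to be a naive UTC datetime.
        return datetime.datetime.utcnow() - row.time_created < max_age

With the shipped default IGNORE_CACHE_TIMESTAMP = 'db', any row found in the database is trusted regardless of age; only a missing row causes a refresh to be planned.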