@@ -42,10 +42,10 @@
 
 class BaseStore(Log):
     # Some helper functions used for upgrades
-    def add_constraint(self, constraint):
+    def add_constraint(self, constraint, connection):
         raise NotImplementedError()
 
-    def add_index(self, index):
+    def add_index(self, index, connection):
         raise NotImplementedError()
 
 
@@ -84,17 +84,17 @@
                                        **pool_args)
         self.is_readonly = False
 
-    def add_constraint(self, constraint):
+    def add_constraint(self, constraint, connection):
         if self._dbengine.dialect.name != 'sqlite':
             # It is impossible to add constraints to a pre-existing table for
             # SQLite
             # source: http://www.sqlite.org/omitted.html
-            create_constraint = AddConstraint(constraint, bind=self._dbengine)
-            create_constraint.execute()
+            create_constraint = AddConstraint(constraint)
+            connection.execute(create_constraint)
 
-    def add_index(self, index):
-        add_index = CreateIndex(index, bind=self._dbengine)
-        add_index.execute()
+    def add_index(self, index, connection):
+        add_index = CreateIndex(index)
+        connection.execute(add_index)
 
     def debug(self, fact):
         if self.db_conn_log:
@@ -209,18 +209,14 @@
         for index in table_def['indexes']:
             idx_name = 'idx_%s_%s' % (name, '_'.join(index))
             table_creation.append(Index(idx_name, *index))
-        table = Table(name, MetaData(self._db.engine()), *table_creation)
+        table = Table(name, MetaData(), *table_creation)
         return table
 
     def _where(self, kvfilter):
-        where = None
+        where = []
         if kvfilter is not None:
             for k in kvfilter:
-                w = self._table.c[k] == kvfilter[k]
-                if where is None:
-                    where = w
-                else:
-                    where = where & w
+                where.append(self._table.c[k] == kvfilter[k])
         return where
 
     def _columns(self, columns=None):
@@ -244,32 +240,35 @@
         self._trans.commit()
 
     def create(self):
-        self._table.create(checkfirst=True)
+        self._table.create(self._db.engine(), checkfirst=True)
 
     def drop(self):
-        self._table.drop(checkfirst=True)
+        self._table.drop(self._db.engine(), checkfirst=True)
 
     def select(self, kvfilter=None, columns=None):
-        return self._con.execute(select(self._columns(columns),
-                                        self._where(kvfilter)))
+        return self._con.execute(
+            select(*self._columns(columns)).where(*self._where(kvfilter))
+        )
 
     def insert(self, values, ttl=None):
-        self._con.execute(self._table.insert(values))
+        self._con.execute(self._table.insert().values(values))
 
     def update(self, values, kvfilter):
-        self._con.execute(self._table.update(self._where(kvfilter), values))
+        self._con.execute(
+            self._table.update().where(*self._where(kvfilter)).values(values)
+        )
 
     def delete(self, kvfilter):
-        self._con.execute(self._table.delete(self._where(kvfilter)))
+        self._con.execute(self._table.delete().where(*self._where(kvfilter)))
 
     def perform_auto_cleanup(self):
         table = self._table
-        sel = select([table.c.uuid]). \
+        sel = select(table.c.uuid). \
             where(and_(table.c.name == 'expiration_time',
                        table.c.value <= str(datetime.datetime.now())))
         # pylint: disable=no-value-for-parameter
         d = table.delete().where(table.c.uuid.in_(sel))
-        return d.execute().rowcount
+        return self._con.execute(d).rowcount
 
 
 class FileStore(BaseStore):
@@ -296,10 +295,10 @@
             self._timestamp = timestamp
         return self._config
 
-    def add_constraint(self, constraint):
+    def add_constraint(self, constraint, connection):
         raise NotImplementedError()
 
-    def add_index(self, index):
+    def add_index(self, index, connection):
         raise NotImplementedError()
 
 
@@ -430,10 +429,10 @@
 
         self.is_readonly = False
 
-    def add_constraint(self, constraint):
+    def add_constraint(self, constraint, connection):
         raise NotImplementedError()
 
-    def add_index(self, index):
+    def add_index(self, index, connection):
         raise NotImplementedError()
 
     def close(self):
@@ -616,18 +615,12 @@
 
         return rows
 
-    def insert(self, value_row, ttl=None):
+    def insert(self, values, ttl=None):
        """Insert a new object into the store.
 
-        value_row is a list of column values.
+        values is a dict of column values.
         ttl is the time for which the object is supposed to be kept.
         """
-        value_row = list(value_row)
-
-        values = {}
-        for column in self._columns:
-            values[column] = value_row.pop(0)
-
         path, _ = self._get_most_specific_dir(values, False, update=True)
         self._store.client.write(path, json.dumps(values), ttl=ttl)
 
@@ -661,7 +654,9 @@
         path, levels_unused = self._get_most_specific_dir(kvfilter)
         if levels_unused == 0 or len(kvfilter) == 0:
             try:
-                current = json.loads(self._store.client.read(path).value)
+                current = self._store.client.read(path).value
+                if current is not None:
+                    current = json.loads(current)
             except etcd.EtcdKeyNotFound:
                 return
             for key in kvfilter:
@@ -959,7 +954,7 @@
                         q.update({'value': options[opt]},
                                  {'name': name, 'option': opt})
                     else:
-                        q.insert((name, opt, options[opt]))
+                        q.insert({"name": name, "option": opt, "value": options[opt]})
 
                 for opt in curvals:
                     if opt not in options:
@@ -999,9 +994,13 @@
         with q:
             try:
                 for name in data:
-                    q.insert((newid, name, data[name]), ttl)
+                    q.insert({"uuid": newid, "name": name, "value": data[name]}, ttl)
                 if expiration_time:
-                    q.insert((newid, 'expiration_time', expiration_time), ttl)
+                    q.insert({
+                        "uuid": newid,
+                        "name": 'expiration_time',
+                        "value": expiration_time,
+                    }, ttl)
             except Exception as e:  # pylint: disable=broad-except
                 self.error("Failed to store %s data: [%s]" % (table, e))
                 raise
@@ -1046,7 +1045,11 @@
                                          {'uuid': uid, 'name': name})
                         else:
                             if datum[name] is not None:
-                                q.insert((uid, name, datum[name]), ttl)
+                                q.insert({
+                                    "uuid": uid,
+                                    "name": name,
+                                    "value": datum[name],
+                                }, ttl)
 
         except Exception as e:  # pylint: disable=broad-except
             self.error("Failed to store data in %s: [%s]" % (table, e))
@@ -1054,7 +1057,7 @@
 
     def del_unique_data(self, table, uuidval):
         kvfilter = {'uuid': uuidval}
-        q = self._query(self._db, table, UNIQUE_DATA_TABLE, trans=False)
+        q = self._query(self._db, table, UNIQUE_DATA_TABLE, trans=True)
         with q:
             try:
                 q.delete(kvfilter)
@@ -1104,8 +1107,9 @@
                       'login_config',
                       'provider_config',
                       'authz_config']:
-            q = self._query(self._db, table, OPTIONS_TABLE, trans=False)
-            q.create()
+            q = self._query(self._db, table, OPTIONS_TABLE, trans=True)
+            with q:
+                q.create()
 
     def _upgrade_schema(self, old_version):
         if old_version == 1:
@@ -1115,17 +1119,19 @@
                           'login_config',
                           'provider_config']:
                 # pylint: disable=protected-access
-                table = self._query(self._db, table, OPTIONS_TABLE,
-                                    trans=False)._table
-                self._db.add_constraint(table.primary_key)
-                for index in table.indexes:
-                    self._db.add_index(index)
+                q = self._query(self._db, table, OPTIONS_TABLE, trans=True)
+                table = q._table
+                with q:
+                    self._db.add_constraint(table.primary_key, q._con)
+                    for index in table.indexes:
+                        self._db.add_index(index, q._con)
             return 2
         elif old_version == 2:
             # Version 3 adds the authz config table
             q = self._query(self._db, 'authz_config', OPTIONS_TABLE,
-                            trans=False)
-            q.create()
+                            trans=True)
+            with q:
+                q.create()
             self.save_options('authz_config', 'global', {'enabled': 'allow'})
             return 3
         else:
@@ -1135,8 +1141,9 @@
         if not self.is_readonly:
             table = plugin_name+'_data'
             q = self._query(self._db, table, UNIQUE_DATA_TABLE,
-                            trans=False)
-            q.create()
+                            trans=True)
+            with q:
+                q.create()
 
 
 class UserStore(Store):
@@ -1174,7 +1181,7 @@
                     q.update({'value': parameters}, {'name': user,
                                                      'option': key})
                 else:
-                    q.insert((user, key, parameters))
+                    q.insert({"name": user, "option": key, "value": parameters})
         except Exception as e:  # pylint: disable=broad-except
             self.error('Failed to store consent: [%s]' % e)
             raise
@@ -1220,8 +1227,9 @@
         return d
 
     def _initialize_table(self, tablename):
-        q = self._query(self._db, tablename, OPTIONS_TABLE, trans=False)
-        q.create()
+        q = self._query(self._db, tablename, OPTIONS_TABLE, trans=True)
+        with q:
+            q.create()
 
     def _initialize_schema(self):
         self._initialize_table('users')
@@ -1231,11 +1239,12 @@
         if old_version == 1:
             # In schema version 2, we added indexes and primary keys
             # pylint: disable=protected-access
-            table = self._query(self._db, 'users', OPTIONS_TABLE,
-                                trans=False)._table
-            self._db.add_constraint(table.primary_key)
-            for index in table.indexes:
-                self._db.add_index(index)
+            q = self._query(self._db, 'users', OPTIONS_TABLE, trans=True)
+            table = q._table
+            with q:
+                self._db.add_constraint(table.primary_key, q._con)
+                for index in table.indexes:
+                    self._db.add_index(index, q._con)
             return 2
         elif old_version == 2:
             # In schema 3 for UserStore, we added user_consent
@@ -1259,18 +1268,20 @@
 
     def _initialize_schema(self):
         q = self._query(self._db, self.table, UNIQUE_DATA_TABLE,
-                        trans=False)
-        q.create()
+                        trans=True)
+        with q:
+            q.create()
 
     def _upgrade_schema(self, old_version):
         if old_version == 1:
             # In schema version 2, we added indexes and primary keys
             # pylint: disable=protected-access
-            table = self._query(self._db, self.table, UNIQUE_DATA_TABLE,
-                                trans=False)._table
-            self._db.add_constraint(table.primary_key)
-            for index in table.indexes:
-                self._db.add_index(index)
+            q = self._query(self._db, self.table, UNIQUE_DATA_TABLE, trans=True)
+            table = q._table
+            with q:
+                self._db.add_constraint(table.primary_key, q._con)
+                for index in table.indexes:
+                    self._db.add_index(index, q._con)
             return 2
         elif old_version == 2:
             return 3
@@ -1357,18 +1368,20 @@
 
     def _initialize_schema(self):
         q = self._query(self._db, self.table, UNIQUE_DATA_TABLE,
-                        trans=False)
-        q.create()
+                        trans=True)
+        with q:
+            q.create()
 
     def _upgrade_schema(self, old_version):
         if old_version == 1:
             # In schema version 2, we added indexes and primary keys
             # pylint: disable=protected-access
-            table = self._query(self._db, self.table, UNIQUE_DATA_TABLE,
-                                trans=False)._table
-            self._db.add_constraint(table.primary_key)
-            for index in table.indexes:
-                self._db.add_index(index)
+            q = self._query(self._db, self.table, UNIQUE_DATA_TABLE, trans=True)
+            table = q._table
+            with q:
+                self._db.add_constraint(table.primary_key, q._con)
+                for index in table.indexes:
+                    self._db.add_index(index, q._con)
             return 2
         elif old_version == 2:
             return 3
These commits adapt Ipsilon to changes in our dependencies and make it work on Fedora 40:
- sqlalchemy 1.4 & 2.0 (see the sketch below)
- mod_auth_openidc
- mod_auth_mellon
- libsss packaging
- python 3.12
Fixes: #404
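
For reviewers who have not tracked the SQLAlchemy 1.4/2.0 API changes, here is a minimal sketch of the execution style the diff above moves to. It is illustrative only: the in-memory SQLite URL and the 'options' table are assumptions that merely mirror the name/option/value shape of OPTIONS_TABLE, not Ipsilon's actual definitions.

    # Sketch of the SQLAlchemy 1.4/2.0 style adopted above; the engine URL and
    # the 'options' table here are illustrative assumptions, not Ipsilon code.
    from sqlalchemy import (Column, Index, MetaData, Table, Text,
                            create_engine, select)

    engine = create_engine('sqlite://')      # throwaway in-memory database
    metadata = MetaData()                    # MetaData no longer takes bind=
    options = Table('options', metadata,
                    Column('name', Text, primary_key=True),
                    Column('option', Text, primary_key=True),
                    Column('value', Text),
                    Index('idx_options_name', 'name'))
    metadata.create_all(engine)              # DDL gets the engine explicitly

    with engine.begin() as conn:             # commits on success, like "with q:"
        # insert()/update()/delete() build values and criteria by method chaining
        conn.execute(options.insert().values({'name': 'global',
                                              'option': 'enabled',
                                              'value': 'allow'}))
        # select() takes column expressions positionally (not a list) and is
        # executed through the connection rather than via statement.execute()
        rows = conn.execute(
            select(options.c.value).where(options.c.name == 'global')
        ).fetchall()
        print(rows)                          # [('allow',)]

The add_constraint()/add_index() changes follow the same pattern: AddConstraint() and CreateIndex() no longer accept bind=, so the DDL construct is handed to connection.execute() instead, which is why those helpers now receive the query's connection.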