#408 Keep up with dependency changes
Merged 5 months ago by ngompa. Opened 5 months ago by abompard.
abompard/ipsilon sqla-20 into master

file modified
+1 -2
@@ -24,8 +24,7 @@ 

  

      try{

          stage('Pre Setup Node'){

-             onmyduffynode 'yum -y install @development docker'

-             onmyduffynode 'systemctl start docker'

+             onmyduffynode 'yum -y install @development podman'

          }

  

          stage('Clone Test Suite') {

file modified
+27 -28
@@ -24,7 +24,7 @@ 

  	python -c 'import cherrypy'

  	python -c 'import M2Crypto'

  	python -c 'import lasso'

- 	python -c '__requires__ = ["sqlalchemy >= 0.8"]; import pkg_resources; import sqlalchemy'

+ 	python -c 'import importlib.metadata; assert tuple(int(n) for n in importlib.metadata.version("sqlalchemy").split(".")[:2]) >= (1, 4)'

  	python -c 'import ldap'

  	python -c 'import pam'

  	python -c 'import fedora'
@@ -32,7 +32,6 @@ 

  	python -c 'import psycopg2'

  	# And now everything else

  	ls /usr/lib*/security/pam_sss.so

- 	ls /usr/lib*/libsss_simpleifp.so.0

  	ls /usr/lib*/httpd/modules/mod_wsgi_python3.so

  	ls /usr/libexec/mod_auth_mellon

  
@@ -136,64 +135,64 @@ 

  # Running within containers

  container-quickrun:

  	echo "Building quickrun container ..."

- 	(cat tests/containers/Dockerfile-base tests/containers/Dockerfile-dev tests/containers/Dockerfile-fedora tests/containers/Dockerfile-rpm tests/containers/Dockerfile-rpm-py3; echo "USER testuser") | sed -e 's/BASE/fedora:latest/' | docker build -f - -t ipsilon-quickrun .

+ 	(cat tests/containers/Dockerfile-base tests/containers/Dockerfile-dev tests/containers/Dockerfile-fedora tests/containers/Dockerfile-rpm tests/containers/Dockerfile-rpm-py3; echo "USER testuser") | sed -e 's/BASE/fedora:latest/' | podman build -f - -t ipsilon-quickrun .

  	echo "quickrun container built"

  

  quickrun: container-quickrun

  	echo "Starting Quickrun ..."

- 	docker run -v `pwd`:/code:z -t --rm -it -p 8080 ipsilon-quickrun

+ 	podman run -v `pwd`:/code:z -t --rm -it -p 8080 ipsilon-quickrun

  

  # Testing within containers

  container-centos8-stream:

  	@echo "Building CentOS 8 Stream container ..."

- 	@(cat tests/containers/Dockerfile-base tests/containers/Dockerfile-centos8 tests/containers/Dockerfile-rpm tests/containers/Dockerfile-rpm-py3; echo "USER testuser") | sed -e 's/BASE/centos:stream8/' | docker build -f - -q -t ipsilon-centos8-stream .

+ 	@(cat tests/containers/Dockerfile-base tests/containers/Dockerfile-centos8 tests/containers/Dockerfile-rpm tests/containers/Dockerfile-rpm-py3; echo "USER testuser") | sed -e 's/BASE/centos:stream8/' | podman build -f - -q -t ipsilon-centos8-stream .

  	@echo "CentOS 8 Stream container built"

  

  container-centos9-stream:

  	@echo "Building CentOS 9 Stream container ..."

- 	@(cat tests/containers/Dockerfile-base tests/containers/Dockerfile-centos9 tests/containers/Dockerfile-rpm tests/containers/Dockerfile-rpm-py3; echo "USER testuser") | sed -e 's/BASE/centos:stream9/' | docker build -f - -q -t ipsilon-centos9-stream .

+ 	@(cat tests/containers/Dockerfile-base tests/containers/Dockerfile-centos9 tests/containers/Dockerfile-rpm tests/containers/Dockerfile-rpm-py3; echo "USER testuser") | sed -e 's/BASE/centos:stream9/' | podman build -f - -q -t ipsilon-centos9-stream .

  	@echo "CentOS 9 Stream container built"

  

- container-fedora37:

- 	@echo "Building Fedora 37 container ..."

- 	@(cat tests/containers/Dockerfile-base tests/containers/Dockerfile-fedora tests/containers/Dockerfile-rpm tests/containers/Dockerfile-rpm-py3; echo "USER testuser") | sed -e 's/BASE/fedora:37/' | docker build -f - -t ipsilon-fedora37 .

- 	@echo "Fedora 37 container built"

+ container-fedora39:

+ 	@echo "Building Fedora 39 container ..."

+ 	@(cat tests/containers/Dockerfile-base tests/containers/Dockerfile-fedora tests/containers/Dockerfile-rpm tests/containers/Dockerfile-rpm-py3; echo "USER testuser") | sed -e 's/BASE/fedora:39/' | podman build -f - -t ipsilon-fedora39 .

+ 	@echo "Fedora 39 container built"

  

- container-fedora38:

- 	@echo "Building Fedora 38 container ..."

- 	@(cat tests/containers/Dockerfile-base tests/containers/Dockerfile-fedora tests/containers/Dockerfile-rpm tests/containers/Dockerfile-rpm-py3; echo "USER testuser") | sed -e 's/BASE/fedora:38/' | docker build -f - -t ipsilon-fedora38 .

- 	@echo "Fedora 38 container built"

+ container-fedora40:

+ 	@echo "Building Fedora 40 container ..."

+ 	@(cat tests/containers/Dockerfile-base tests/containers/Dockerfile-fedora tests/containers/Dockerfile-rpm tests/containers/Dockerfile-rpm-py3; echo "USER testuser") | sed -e 's/BASE/fedora:40/' | podman build -f - -t ipsilon-fedora40 .

+ 	@echo "Fedora 40 container built"

  

- containers: container-centos9-stream container-fedora37 container-fedora38

+ containers: container-centos9-stream container-fedora39 container-fedora40

  	@echo "Containers built"

  

  containertest-centos8-stream: container-centos8-stream

  	@echo "Starting CentOS 8 Stream tests ..."

- 	@docker run -v `pwd`:/code:z -t --rm ipsilon-centos8-stream

+ 	@podman run -v `pwd`:/code:z -t --rm ipsilon-centos8-stream

  	@echo "CentOS 8 Stream passed"

  

  containertest-centos9-stream: container-centos9-stream

  	@echo "Starting CentOS 9 Stream tests ..."

- 	@docker run -v `pwd`:/code:z -t --rm ipsilon-centos9-stream

+ 	@podman run -v `pwd`:/code:z -t --rm ipsilon-centos9-stream

  	@echo "CentOS 9 Stream passed"

  

- containertest-fedora37: container-fedora37

- 	@echo "Starting Fedora 37 tests ..."

- 	@docker run -v `pwd`:/code:z -t --rm ipsilon-fedora37

- 	@echo "Fedora 37 passed"

+ containertest-fedora39: container-fedora39

+ 	@echo "Starting Fedora 39 tests ..."

+ 	@podman run -v `pwd`:/code:z -t --rm ipsilon-fedora39

+ 	@echo "Fedora 39 passed"

  

  # This containertest is failing on lasso library randomly

  # Removing it from containertest till new version of lasso library is available

  # https://src.fedoraproject.org/rpms/lasso

- containertest-fedora38: container-fedora38

- 	@echo "Starting Fedora 38 tests ..."

- 	@docker run -v `pwd`:/code:z -t --rm ipsilon-fedora38

- 	@echo "Fedora 38 passed"

+ containertest-fedora40: container-fedora40

+ 	@echo "Starting Fedora 40 tests ..."

+ 	@podman run -v `pwd`:/code:z -t --rm ipsilon-fedora40

+ 	@echo "Fedora 40 passed"

  

- containertest-lint: container-fedora37

+ containertest-lint: container-fedora39

  	@echo "Starting code lint tests ..."

- 	@docker run -v `pwd`:/code:z -t --rm --entrypoint /usr/bin/make ipsilon-fedora37 lint security

+ 	@podman run -v `pwd`:/code:z -t --rm --entrypoint /usr/bin/make ipsilon-fedora39 lint security

  	@echo "Code lint tests passed"

  

- containertest: containertest-lint containertest-centos9-stream containertest-centos8-stream containertest-fedora37

+ containertest: containertest-lint containertest-centos9-stream containertest-centos8-stream containertest-fedora39 containertest-fedora40

  	@echo "Container tests passed"

file modified
+5 -3
@@ -9,7 +9,8 @@ 

    config.hostmanager.manage_guest = true

  

    config.vm.define "freeipa" do |freeipa|

-     freeipa.vm.box = "fedora/38-cloud-base"

+     freeipa.vm.box_url = "https://download.fedoraproject.org/pub/fedora/linux/releases/40/Cloud/x86_64/images/Fedora-Cloud-Base-Vagrant-libvirt.x86_64-40-1.14.vagrant.libvirt.box"

+     freeipa.vm.box = "f40-cloud-libvirt"

      freeipa.vm.hostname = "ipa.ipsilon.test"

      freeipa.hostmanager.aliases = ("kerberos.ipsilon.test")

  
@@ -26,15 +27,16 @@ 

    end

  

    config.vm.define "ipsilon" do |ipsilon|

-     ipsilon.vm.box = "fedora/37-cloud-base"

      ipsilon.vm.hostname = "ipsilon.test"

+     ipsilon.vm.box_url = "https://download.fedoraproject.org/pub/fedora/linux/releases/40/Cloud/x86_64/images/Fedora-Cloud-Base-Vagrant-libvirt.x86_64-40-1.14.vagrant.libvirt.box"

+     ipsilon.vm.box = "f40-cloud-libvirt"

  

      ipsilon.vm.synced_folder ".", "/vagrant", disabled: true

      ipsilon.vm.synced_folder ".", "/home/vagrant/ipsilon", type: "sshfs" # , rsync__exclude: ['./testdir*']

  

      ipsilon.vm.provider :libvirt do |libvirt|

        libvirt.cpus = 2

-       libvirt.memory = 1024

+       libvirt.memory = 2048

      end

  

      ipsilon.vm.provision "ansible" do |ansible|

@@ -204,7 +204,6 @@ 

  License:        GPLv3+

  Requires:       %{name}-base = %{version}-%{release}

  Requires:       python3-sssdconfig

- Requires:       libsss_simpleifp

  Requires:       sssd >= 1.12.4

  BuildArch:      noarch

  

@@ -47,7 +47,6 @@ 

        - openldap-servers

        - openssl

        - sssd

-       - libsss_simpleifp

        - gcc

        - krb5-devel

        - krb5-server

@@ -2,9 +2,6 @@ 

  #

  # Copyright (C) 2015 Ipsilon project Contributors, for license see COPYING

  

- __requires__ = ['sqlalchemy >= 0.8']

- import pkg_resources  # pylint: disable=unused-import

- 

  import argparse

  import cherrypy

  import logging

@@ -1,9 +1,6 @@ 

  #!/usr/bin/python

  # Copyright (C) 2014 Ipsilon project Contributors, for license see COPYING

  

- __requires__ = ['sqlalchemy >= 0.8', 'jinja2 >= 2.4']

- import pkg_resources  # pylint: disable=unused-import

- 

  from ipsilon.login.common import LoginMgrsInstall

  from ipsilon.info.common import InfoProviderInstall

  from ipsilon.providers.common import ProvidersInstall
@@ -282,7 +279,7 @@ 

      data_dir = os.path.join(DATADIR, args['instance'])

  

      try:

-         tconf = configparser.SafeConfigParser()

+         tconf = configparser.ConfigParser()

          tconf.read(os.path.join(instance_conf, 'ipsilon.conf'))

          cache_dir = tconf.get('global', 'cache_dir')

      except (configparser.NoOptionError, configparser.NoSectionError):
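
The hunk above replaces configparser.SafeConfigParser, removed in Python 3.12, with configparser.ConfigParser (SafeConfigParser had been a deprecated alias for it since Python 3.2); the read()/get() calls are unchanged. A minimal sketch, using a hypothetical config path and a fallback value instead of the try/except in the real code:

# ConfigParser is the drop-in replacement for the removed SafeConfigParser.
import configparser

tconf = configparser.ConfigParser()
tconf.read("/etc/ipsilon/idp/ipsilon.conf")          # hypothetical path
cache_dir = tconf.get("global", "cache_dir", fallback="/var/cache/ipsilon")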

@@ -2,9 +2,6 @@ 

  #

  # Copyright (C) 2015 Ipsilon project Contributors, for license see COPYING

  

- __requires__ = ['sqlalchemy >= 0.8', 'jinja2 >= 2.4']

- import pkg_resources  # pylint: disable=unused-import

- 

  from argparse import ArgumentParser

  import sys

  import logging

file modified
-2
@@ -5,8 +5,6 @@ 

  # These lines make sure that we have at least a minimum version of some

  # packages, since we depend on features provided by them.

  import __main__

- __main__.__requires__ = ['sqlalchemy >= 0.8', 'jinja2 >= 2.4']

- import pkg_resources  # pylint: disable=unused-import

  

  import sys

  sys.stdout = sys.stderr

@@ -86,16 +86,18 @@ 

          if old_version == 1:

              # In schema version 2, we added indexes and primary keys

              # pylint: disable=protected-access

-             table = self._query(self._db, 'association', UNIQUE_DATA_TABLE,

-                                 trans=False)._table

-             self._db.add_constraint(table.primary_key)

-             for index in table.indexes:

-                 self._db.add_index(index)

-             table = self._query(self._db, 'openid_extensions', OPTIONS_TABLE,

-                                 trans=False)._table

-             self._db.add_constraint(table.primary_key)

-             for index in table.indexes:

-                 self._db.add_index(index)

+             q = self._query(self._db, 'association', UNIQUE_DATA_TABLE, trans=True)

+             table = q._table

+             with q:

+                 self._db.add_constraint(table.primary_key, q._con)

+                 for index in table.indexes:

+                     self._db.add_index(index, q._con)

+             q = self._query(self._db, 'openid_extensions', OPTIONS_TABLE, trans=True)

+             table = q._table

+             with q:

+                 self._db.add_constraint(table.primary_key, q._con)

+                 for index in table.indexes:

+                     self._db.add_index(index, q._con)

              return 2

          elif old_version == 2:

              return 3

@@ -204,7 +204,8 @@ 

          """

          headers = {'Content-Type': SOAP_MEDIA_TYPE}

          try:

-             response = requests.post(logout.msgUrl, data=logout.msgBody,

+             response = requests.post(logout.msgUrl,

+                                      data={"SAMLRequest": logout.msgBody},

                                       headers=headers, timeout=30)

          except Exception as e:  # pylint: disable=broad-except

              self.error('SOAP HTTP request failed: (%s) (on %s)' %
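
The SOAP logout change above sends the LogoutRequest as a SAMLRequest form field instead of as the raw POST body. A small sketch of what that difference means in requests, with a hypothetical endpoint and placeholder message:

# Passing a dict to data= makes requests form-encode the payload, so the
# server receives a SAMLRequest=<message> field; passing a plain string or
# bytes (the old behaviour) would send the message verbatim as the body.
import requests

msg_body = "<samlp:LogoutRequest ... />"            # placeholder SAML message
requests.post("https://sp.example.com/SLO/SOAP",    # hypothetical endpoint
              data={"SAMLRequest": msg_body},
              timeout=30)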

file modified
+88 -75
@@ -42,10 +42,10 @@ 

  

  class BaseStore(Log):

      # Some helper functions used for upgrades

-     def add_constraint(self, constraint):

+     def add_constraint(self, constraint, connection):

          raise NotImplementedError()

  

-     def add_index(self, index):

+     def add_index(self, index, connection):

          raise NotImplementedError()

  

  
@@ -84,17 +84,17 @@ 

                                         **pool_args)

          self.is_readonly = False

  

-     def add_constraint(self, constraint):

+     def add_constraint(self, constraint, connection):

          if self._dbengine.dialect.name != 'sqlite':

              # It is impossible to add constraints to a pre-existing table for

              #  SQLite

              # source: http://www.sqlite.org/omitted.html

-             create_constraint = AddConstraint(constraint, bind=self._dbengine)

-             create_constraint.execute()

+             create_constraint = AddConstraint(constraint)

+             connection.execute(create_constraint)

  

-     def add_index(self, index):

-         add_index = CreateIndex(index, bind=self._dbengine)

-         add_index.execute()

+     def add_index(self, index, connection):

+         add_index = CreateIndex(index)

+         connection.execute(add_index)

  

      def debug(self, fact):

          if self.db_conn_log:
@@ -209,18 +209,14 @@ 

          for index in table_def['indexes']:

              idx_name = 'idx_%s_%s' % (name, '_'.join(index))

              table_creation.append(Index(idx_name, *index))

-         table = Table(name, MetaData(self._db.engine()), *table_creation)

+         table = Table(name, MetaData(), *table_creation)

          return table

  

      def _where(self, kvfilter):

-         where = None

+         where = []

          if kvfilter is not None:

              for k in kvfilter:

-                 w = self._table.c[k] == kvfilter[k]

-                 if where is None:

-                     where = w

-                 else:

-                     where = where & w

+                 where.append(self._table.c[k] == kvfilter[k])

          return where

  

      def _columns(self, columns=None):
@@ -244,32 +240,35 @@ 

          self._trans.commit()

  

      def create(self):

-         self._table.create(checkfirst=True)

+         self._table.create(self._db.engine(), checkfirst=True)

  

      def drop(self):

-         self._table.drop(checkfirst=True)

+         self._table.drop(self._db.engine(), checkfirst=True)

  

      def select(self, kvfilter=None, columns=None):

-         return self._con.execute(select(self._columns(columns),

-                                         self._where(kvfilter)))

+         return self._con.execute(

+             select(*self._columns(columns)).where(*self._where(kvfilter))

+         )

  

      def insert(self, values, ttl=None):

-         self._con.execute(self._table.insert(values))

+         self._con.execute(self._table.insert().values(values))

  

      def update(self, values, kvfilter):

-         self._con.execute(self._table.update(self._where(kvfilter), values))

+         self._con.execute(

+             self._table.update().where(*self._where(kvfilter)).values(values)

+         )

  

      def delete(self, kvfilter):

-         self._con.execute(self._table.delete(self._where(kvfilter)))

+         self._con.execute(self._table.delete().where(*self._where(kvfilter)))

  

      def perform_auto_cleanup(self):

          table = self._table

-         sel = select([table.c.uuid]). \

+         sel = select(table.c.uuid). \

              where(and_(table.c.name == 'expiration_time',

                         table.c.value <= str(datetime.datetime.now())))

          # pylint: disable=no-value-for-parameter

          d = table.delete().where(table.c.uuid.in_(sel))

-         return d.execute().rowcount

+         return self._con.execute(d).rowcount

  

  

  class FileStore(BaseStore):
@@ -296,10 +295,10 @@ 

              self._timestamp = timestamp

          return self._config

  

-     def add_constraint(self, constraint):

+     def add_constraint(self, constraint, connection):

          raise NotImplementedError()

  

-     def add_index(self, index):

+     def add_index(self, index, connection):

          raise NotImplementedError()

  

  
@@ -430,10 +429,10 @@ 

  

          self.is_readonly = False

  

-     def add_constraint(self, constraint):

+     def add_constraint(self, constraint, connection):

          raise NotImplementedError()

  

-     def add_index(self, index):

+     def add_index(self, index, connection):

          raise NotImplementedError()

  

      def close(self):
@@ -616,18 +615,12 @@ 

  

          return rows

  

-     def insert(self, value_row, ttl=None):

+     def insert(self, values, ttl=None):

          """Insert a new object into the store.

  

-         value_row is a list of column values.

+         values is a dict of column values.

          ttl is the time for which the object is supposed to be kept.

          """

-         value_row = list(value_row)

- 

-         values = {}

-         for column in self._columns:

-             values[column] = value_row.pop(0)

- 

          path, _ = self._get_most_specific_dir(values, False, update=True)

          self._store.client.write(path, json.dumps(values), ttl=ttl)

  
@@ -661,7 +654,9 @@ 

          path, levels_unused = self._get_most_specific_dir(kvfilter)

          if levels_unused == 0 or len(kvfilter) == 0:

              try:

-                 current = json.loads(self._store.client.read(path).value)

+                 current = self._store.client.read(path).value

+                 if current is not None:

+                     current = json.loads(current)

              except etcd.EtcdKeyNotFound:

                  return

              for key in kvfilter:
@@ -959,7 +954,7 @@ 

                          q.update({'value': options[opt]},

                                   {'name': name, 'option': opt})

                      else:

-                         q.insert((name, opt, options[opt]))

+                         q.insert({"name": name, "option": opt, "value": options[opt]})

  

                  for opt in curvals:

                      if opt not in options:
@@ -999,9 +994,13 @@ 

          with q:

              try:

                  for name in data:

-                     q.insert((newid, name, data[name]), ttl)

+                     q.insert({"uuid": newid, "name": name, "value": data[name]}, ttl)

                  if expiration_time:

-                     q.insert((newid, 'expiration_time', expiration_time), ttl)

+                     q.insert({

+                         "uuid": newid,

+                         "name": 'expiration_time',

+                         "value": expiration_time,

+                     }, ttl)

              except Exception as e:  # pylint: disable=broad-except

                  self.error("Failed to store %s data: [%s]" % (table, e))

                  raise
@@ -1046,7 +1045,11 @@ 

                                           {'uuid': uid, 'name': name})

                          else:

                              if datum[name] is not None:

-                                 q.insert((uid, name, datum[name]), ttl)

+                                 q.insert({

+                                     "uuid": uid,

+                                     "name": name,

+                                     "value": datum[name],

+                                 }, ttl)

  

              except Exception as e:  # pylint: disable=broad-except

                  self.error("Failed to store data in %s: [%s]" % (table, e))
@@ -1054,7 +1057,7 @@ 

  

      def del_unique_data(self, table, uuidval):

          kvfilter = {'uuid': uuidval}

-         q = self._query(self._db, table, UNIQUE_DATA_TABLE, trans=False)

+         q = self._query(self._db, table, UNIQUE_DATA_TABLE, trans=True)

          with q:

              try:

                  q.delete(kvfilter)
@@ -1104,8 +1107,9 @@ 

                        'login_config',

                        'provider_config',

                        'authz_config']:

-             q = self._query(self._db, table, OPTIONS_TABLE, trans=False)

-             q.create()

+             q = self._query(self._db, table, OPTIONS_TABLE, trans=True)

+             with q:

+                 q.create()

  

      def _upgrade_schema(self, old_version):

          if old_version == 1:
@@ -1115,17 +1119,19 @@ 

                            'login_config',

                            'provider_config']:

                  # pylint: disable=protected-access

-                 table = self._query(self._db, table, OPTIONS_TABLE,

-                                     trans=False)._table

-                 self._db.add_constraint(table.primary_key)

-                 for index in table.indexes:

-                     self._db.add_index(index)

+                 q = self._query(self._db, table, OPTIONS_TABLE, trans=True)

+                 table = q._table

+                 with q:

+                     self._db.add_constraint(table.primary_key, q._con)

+                     for index in table.indexes:

+                         self._db.add_index(index, q._con)

              return 2

          elif old_version == 2:

              # Version 3 adds the authz config table

              q = self._query(self._db, 'authz_config', OPTIONS_TABLE,

-                             trans=False)

-             q.create()

+                             trans=True)

+             with q:

+                 q.create()

              self.save_options('authz_config', 'global', {'enabled': 'allow'})

              return 3

          else:
@@ -1135,8 +1141,9 @@ 

          if not self.is_readonly:

              table = plugin_name+'_data'

              q = self._query(self._db, table, UNIQUE_DATA_TABLE,

-                             trans=False)

-             q.create()

+                             trans=True)

+             with q:

+                 q.create()

  

  

  class UserStore(Store):
@@ -1174,7 +1181,7 @@ 

                      q.update({'value': parameters}, {'name': user,

                                                       'option': key})

                  else:

-                     q.insert((user, key, parameters))

+                     q.insert({"name": user, "option": key, "value": parameters})

              except Exception as e:  # pylint: disable=broad-except

                  self.error('Failed to store consent: [%s]' % e)

                  raise
@@ -1220,8 +1227,9 @@ 

          return d

  

      def _initialize_table(self, tablename):

-         q = self._query(self._db, tablename, OPTIONS_TABLE, trans=False)

-         q.create()

+         q = self._query(self._db, tablename, OPTIONS_TABLE, trans=True)

+         with q:

+             q.create()

  

      def _initialize_schema(self):

          self._initialize_table('users')
@@ -1231,11 +1239,12 @@ 

          if old_version == 1:

              # In schema version 2, we added indexes and primary keys

              # pylint: disable=protected-access

-             table = self._query(self._db, 'users', OPTIONS_TABLE,

-                                 trans=False)._table

-             self._db.add_constraint(table.primary_key)

-             for index in table.indexes:

-                 self._db.add_index(index)

+             q = self._query(self._db, 'users', OPTIONS_TABLE, trans=True)

+             table = q._table

+             with q:

+                 self._db.add_constraint(table.primary_key, q._con)

+                 for index in table.indexes:

+                     self._db.add_index(index, q._con)

              return 2

          elif old_version == 2:

              # In schema 3 for UserStore, we added user_consent
@@ -1259,18 +1268,20 @@ 

  

      def _initialize_schema(self):

          q = self._query(self._db, self.table, UNIQUE_DATA_TABLE,

-                         trans=False)

-         q.create()

+                         trans=True)

+         with q:

+             q.create()

  

      def _upgrade_schema(self, old_version):

          if old_version == 1:

              # In schema version 2, we added indexes and primary keys

              # pylint: disable=protected-access

-             table = self._query(self._db, self.table, UNIQUE_DATA_TABLE,

-                                 trans=False)._table

-             self._db.add_constraint(table.primary_key)

-             for index in table.indexes:

-                 self._db.add_index(index)

+             q = self._query(self._db, self.table, UNIQUE_DATA_TABLE, trans=True)

+             table = q._table

+             with q:

+                 self._db.add_constraint(table.primary_key, q._con)

+                 for index in table.indexes:

+                     self._db.add_index(index, q._con)

              return 2

          elif old_version == 2:

              return 3
@@ -1357,18 +1368,20 @@ 

  

      def _initialize_schema(self):

          q = self._query(self._db, self.table, UNIQUE_DATA_TABLE,

-                         trans=False)

-         q.create()

+                         trans=True)

+         with q:

+             q.create()

  

      def _upgrade_schema(self, old_version):

          if old_version == 1:

              # In schema version 2, we added indexes and primary keys

              # pylint: disable=protected-access

-             table = self._query(self._db, self.table, UNIQUE_DATA_TABLE,

-                                 trans=False)._table

-             self._db.add_constraint(table.primary_key)

-             for index in table.indexes:

-                 self._db.add_index(index)

+             q = self._query(self._db, self.table, UNIQUE_DATA_TABLE, trans=True)

+             table = q._table

+             with q:

+                 self._db.add_constraint(table.primary_key, q._con)

+                 for index in table.indexes:

+                     self._db.add_index(index, q._con)

              return 2

          elif old_version == 2:

              return 3
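
The store changes above move to SQLAlchemy 1.4/2.0 idioms: select() takes columns positionally, filters go through .where(), insert()/update() statements are built with .values(), tables are created and dropped against an explicit engine, and every statement runs on an explicit connection rather than through the removed bound/implicit execution. A minimal, self-contained sketch of that style (not the Ipsilon store classes; a throwaway in-memory SQLite database and a hypothetical table):

# Demonstrates the 1.4/2.0-style API the diff adopts: explicit engine for
# DDL, explicit connection for DML, and statement construction via
# .values()/.where() instead of positional dicts and implicit execution.
from sqlalchemy import Column, MetaData, Table, Text, create_engine, select

engine = create_engine("sqlite://")                  # throwaway in-memory DB
table = Table("options", MetaData(),
              Column("name", Text, primary_key=True),
              Column("value", Text))
table.create(engine, checkfirst=True)                # bind passed explicitly

with engine.begin() as conn:                         # transaction per block
    conn.execute(table.insert().values({"name": "enabled", "value": "yes"}))
    conn.execute(table.update()
                 .where(table.c.name == "enabled")
                 .values({"value": "no"}))
    rows = conn.execute(select(table.c.value)
                        .where(table.c.name == "enabled")).fetchall()
    print(rows)                                      # [('no',)]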

file modified
+15 -12
@@ -28,11 +28,12 @@ 

          if old_version == 1:

              # In schema version 2, we added indexes and primary keys

              # pylint: disable=protected-access

-             table = self._query(self._db, 'sessions', SESSION_TABLE,

-                                 trans=False)._table

-             self._db.add_constraint(table.primary_key)

-             for index in table.indexes:

-                 self._db.add_index(index)

+             q = self._query(self._db, 'sessions', SESSION_TABLE, trans=True)

+             table = q._table

+             with q:

+                 self._db.add_constraint(table.primary_key, q._con)

+                 for index in table.indexes:

+                     self._db.add_index(index, q._con)

              return 2

          elif old_version == 2:

              return 3
@@ -41,11 +42,13 @@ 

  

      def _cleanup(self):

          # pylint: disable=protected-access

-         table = SqlQuery(self._db, 'sessions', SESSION_TABLE)._table

+         q = SqlQuery(self._db, 'sessions', SESSION_TABLE, trans=True)

+         table = q._table

          # pylint: disable=no-value-for-parameter

-         d = table.delete().where(table.c.expiration_time <=

-                                  str(datetime.datetime.now()))

-         return d.execute().rowcount

+         with q:

+             d = table.delete().where(table.c.expiration_time <=

+                                      str(datetime.datetime.now()))

+             return q._con.execute(d).rowcount

  

  

  class SqlSession(Session):
@@ -95,9 +98,9 @@ 

          with q:

              q.delete({'id': self.id})

              data = json.dumps((self._data, expiration_time)).encode('utf-8')

-             q.insert((self.id,

-                       base64.b64encode(data).decode('utf-8'),

-                       expiration_time))

+             q.insert({"id": self.id,

+                       "data": base64.b64encode(data).decode('utf-8'),

+                       "expiration_time": expiration_time})

  

      def _delete(self):

          q = SqlQuery(self._db, 'sessions', SESSION_TABLE)

file modified
+2 -1
@@ -28,4 +28,5 @@ 

  fi

  

  export PYTHONPATH=.

- exec $pyver ./tests/tests.py "$@"

+ export SQLALCHEMY_WARN_20=1

+ exec $pyver -W always::DeprecationWarning ./tests/tests.py "$@"

@@ -3,6 +3,6 @@ 

  # This should be kept in sync with the develop page on the website.

  # Distro-specific and python packages should go in the distro sub-dockerfiles.

  RUN yum install -y make httpd mod_auth_mellon postgresql-server \

-         openssl sssd libsss_simpleifp openldap-servers mod_auth_gssapi \

+         openssl sssd openldap-servers mod_auth_gssapi \

  	krb5-server socket_wrapper nss_wrapper nodejs krb5-workstation \

  	sqlite mod_ssl mod_auth_openidc

file modified
+1 -1
@@ -124,7 +124,7 @@ 

      ipsilonconf = os.path.join(testdir, 'etc', idpname, 'ipsilon.conf')

      newconf = configparser.ConfigParser()

      with open(ipsilonconf, 'r') as f:

-         newconf.readfp(f)

+         newconf.read_file(f)

      with open(ipsilonconf, 'w+') as f:

          newconf.set('global', 'admin.config.db',

                      '"configfile://%s"' % adminconf)

file modified
+6 -5
@@ -276,7 +276,8 @@ 

          return result.text

  

      def fetch_page(self, idp, target_url, follow_redirect=True, krb=False,

-                    require_consent=None, return_prefix=None, post_forms=True):

+                    require_consent=None, return_prefix=None, post_forms=True,

+                    allowed_codes=(200,)):

          """

          Fetch a page and parse the response code to determine what to do

          next.
@@ -329,7 +330,7 @@ 

                          continue

                  except WrongPage:

                      pass

-             elif r.status_code == 200:

+             elif r.status_code in allowed_codes:

                  page = PageTree(r)

  

                  try:
@@ -379,8 +380,8 @@ 

  

                  return page

              else:

-                 raise ValueError("Unhandled status (%d) on url %s" % (

-                                  r.status_code, url))

+                 raise ValueError("Unhandled status (%d) on url %s: %s" % (

+                                  r.status_code, url, r.content))

  

      def auth_to_idp(self, idp, krb=False, rule=None, expected=None):

  
@@ -449,7 +450,7 @@ 

              payload = {'name': desc}

              r = self.post(url, headers=headers, data=payload, files=metafile)

          if r.status_code != expected_status:

-             raise ValueError('Failed to post SP data [%s]' % repr(r))

+             raise ValueError('Failed to post SP data [%s]: %s' % (repr(r), r.content))

  

          if not rest:

              page = PageTree(r)

file modified
+22 -1
@@ -91,6 +91,8 @@ 

      Options +Includes

      Require all granted

  </Directory>

+ ErrorDocument 400 /error

+ WSGIScriptAlias /error ${HTTPDIR}/errors.py

  """

      t = Template(location)

      text = t.substitute({'HTTPDIR': httpdir})
@@ -101,6 +103,22 @@ 

      os.mkdir(httpdir + '/sp')

      with open(httpdir + '/sp/index.html', 'w') as f:

          f.write(index)

+     error_app = """

+ from wsgiref.util import setup_testing_defaults

+ def application(environ, start_response):

+     setup_testing_defaults(environ)

+ 

+     status = '200 OK'

+     headers = [('Content-type', 'text/plain; charset=utf-8')]

+ 

+     start_response(status, headers)

+ 

+     ret = [("%s: %s\\n" % (key, value)).encode("utf-8")

+            for key, value in environ.items()]

+     return ret

+ """

+     with open(httpdir + '/errors.py', 'w') as f:

+         f.write(error_app)

  

  

  def convert_to_dict(envlist):
@@ -484,7 +502,10 @@ 

      with TC.case('Access first SP Protected Area with IdP deny, with '

                   'pre-auth'):

          sess2.auth_to_idp(idpname)

-         page = sess2.fetch_page(idpname, 'https://127.0.0.11:45081/sp/')

+         # mod_auth_openidc >=2.4.14 returns a 400 error page on access denied

+         page = sess2.fetch_page(

+             idpname, 'https://127.0.0.11:45081/sp/', allowed_codes=[200, 400]

+         )

          check_text_results(page.text,

                             'OpenID Connect Provider error: access_denied')

  

file modified
+5 -3
@@ -146,6 +146,7 @@ 

      # Sessions are valid for one minute, and we clean up once per minute.

      # However, checking after two minute is kinda cutting it close, so we add ten

      # seconds to make sure the system has had time to clean up.

+     print("Waiting for sessions to expire")

      time.sleep(130)

  

      with TC.case('Verify logged out state'):
@@ -158,8 +159,9 @@ 

          conn = sqlite3.connect(sess_db)

          cur = conn.cursor()

          cur.execute('SELECT * FROM saml2_sessions;')

-         if len(cur.fetchall()) != 0:

-             raise ValueError('SAML2 sessions left behind: %s' % cur.fetchall())

+         sessions = cur.fetchall()

+         if len(sessions) != 0:

+             raise ValueError('SAML2 sessions left behind: %s' % sessions)

  

  

      with TC.case('Checking that refreshable OpenIDC tokens are not expired'):
@@ -213,7 +215,7 @@ 

              )

  

          tokens = store.get_unique_data("token")

-         assert len(tokens) == 1

+         assert len(tokens) == 1, f"{len(tokens)} tokens: {tokens!r}"

          if list(tokens.keys())[0] != token_refreshable["token_id"]:

              raise Exception("The refreshable token has been cleaned up")

  

file modified
-5
@@ -2,11 +2,6 @@ 

  #

  # Copyright (C) 2014-2017 Ipsilon project Contributors, for license see COPYING

  

- from __future__ import print_function

- 

- __requires__ = ['sqlalchemy >= 0.8']

- import pkg_resources  # pylint: disable=unused-import

- 

  import argparse

  from ipsilon.util import plugin

  import os