#164 Implement standardized automatic cleanup functionality
Merged 7 years ago by puiterwijk. Opened 7 years ago by puiterwijk.
puiterwijk/ipsilon autocleanup  into  master

file modified
+1
@@ -106,6 +106,7 @@ 

  	PYTHONPATH=./ ./tests/tests.py --path=$(TESTDIR) --test=openidc

  	PYTHONPATH=./ ./tests/tests.py --path=$(TESTDIR) --test=authz

  	PYTHONPATH=./ ./tests/tests.py --path=$(TESTDIR) --test=dbupgrades

+ 	PYTHONPATH=./ ./tests/tests.py --path=$(TESTDIR) --test=testcleanup

  

  test: lp-test unittests tests

  

@@ -104,6 +104,7 @@ 

                  'wellknowndir': args['wellknown_dir'],

                  'sysuser': args['system_user'],

                  'cleanup_interval': args['cleanup_interval'],

+                 'session_timeout': args['session_timeout'],

                  'ipsilondir': BINDIR,

                  'staticdir': STATICDIR,

                  'cachedir': CACHEDIR,
@@ -417,9 +418,12 @@ 

      parser.add_argument('--samlsessions-dburi',

                          help='SAML 2 sessions database URI (override ' +

                               'template)')

-     parser.add_argument('--cleanup-interval', default=30,

+     parser.add_argument('--cleanup-interval', default=30, type=int,

                          help='Interval between cleaning up stale database ' +

                               'entries (in minutes, default: 30 minutes)')

+     parser.add_argument('--session-timeout', default=30, type=int,

+                         help='Time that sessions are valid for (in minutes, ' +

+                              'default: 30 minutes)')

  

      lms = []

      azs = []
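The new --session-timeout value is added to the instance settings dictionary above and, further down in this PR, replaces the previously hardcoded tools.sessions.timeout = 60 in the CherryPy configuration template. A minimal sketch of that substitution step (the installer's actual template rendering is not shown in this diff; the names below are purely illustrative):

    from string import Template

    # Illustrative instance settings, mirroring the dict built by the installer.
    instance = {'cleanup_interval': 30,   # minutes between cleanup runs
                'session_timeout': 30}    # minutes a session stays valid

    line = Template('tools.sessions.timeout = ${session_timeout}\n')
    print(line.substitute(instance))      # -> tools.sessions.timeout = 30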

@@ -11,6 +11,8 @@ 

  

  

  class OpenIDStore(Store, OpenIDStoreInterface):

+     _auto_cleanup_tables = ['association', 'nonce']

+ 

      def __init__(self, database_url):

          Store.__init__(self, database_url=database_url)

  
@@ -22,7 +24,7 @@ 

                   'assoc_type': assoc.assoc_type}

  

          data = {iden: datum}

-         self.save_unique_data('association', data)

+         self.save_unique_data('association', data, ttl=assoc.lifetime)

  

      def getAssociation(self, server_url, handle=None):

          iden = '%s-%s' % (server_url, handle)
@@ -48,6 +50,10 @@ 

          iden = '%s-%s' % (server_url, handle)

          self.del_unique_data('association', iden)

  

+     def cleanupAssociations(self):

+         # This is automatically cleaned up

+         return

+ 

      def useNonce(self, server_url, timestamp, salt):

          if abs(timestamp - time()) > NonceSKEW:

              return False
@@ -61,33 +67,13 @@ 

  

          datum = {'timestamp': timestamp}

          data = {iden: datum}

-         self.save_unique_data('nonce', data)

+         self.save_unique_data('nonce', data, ttl=NonceSKEW)

  

          return True

  

-     def _cleanup(self):

-         res1 = self.cleanupNonces()

-         res2 = self.cleanupAssociations()

-         return res1 + res2

- 

      def cleanupNonces(self):

-         nonces = self.get_unique_data('nonce')

-         cleaned = 0

-         for iden in nonces:

-             if nonces[iden]['timestamp'] < (time() - NonceSKEW):

-                 cleaned += 1

-                 self.del_unique_data('nonce', iden)

-         return cleaned

- 

-     def cleanupAssociations(self):

-         assocs = self.get_unique_data('association')

-         cleaned = 0

-         for iden in assocs:

-             if ((int(assocs[iden]['issued']) + int(assocs[iden]['lifetime'])) <

-                     time()):

-                 cleaned += 1

-                 self.del_unique_data('association', iden)

-         return cleaned

+         # This is automatically cleaned up

+         return

  

      def _initialize_schema(self):

          q = self._query(self._db, 'association', UNIQUE_DATA_TABLE,

@@ -3,7 +3,6 @@ 

  from cherrypy import config as cherrypy_config

  from ipsilon.util.log import Log

  from ipsilon.util.data import SAML2SessionStore

- import datetime

  from lasso import (

      SAML2_METADATA_BINDING_SOAP,

      SAML2_METADATA_BINDING_REDIRECT,
@@ -32,13 +31,11 @@ 

                      logout response will include an InResponseTo value

                      which matches this.

         logout_request - the Logout request object

-        expiration_time - the time the login session expires

         supported_logout_mechs - logout mechanisms supported by this session

      """

      def __init__(self, uuidval, session_id, provider_id, user,

                   login_session, logoutstate=None, relaystate=None,

                   logout_request=None, request_id=None,

-                  expiration_time=None,

                   supported_logout_mechs=None):

  

          self.uuidval = uuidval
@@ -50,7 +47,6 @@ 

          self.relaystate = relaystate

          self.request_id = request_id

          self.logout_request = logout_request

-         self.expiration_time = expiration_time

          if supported_logout_mechs is None:

              supported_logout_mechs = []

          self.supported_logout_mechs = supported_logout_mechs
@@ -91,7 +87,6 @@ 

          data['relaystate'] = self.relaystate

          data['logout_request'] = self.logout_request

          data['request_id'] = self.request_id

-         data['expiration_time'] = self.expiration_time

  

          return {self.uuidval: data}

  
@@ -128,7 +123,6 @@ 

                             data.get('relaystate'),

                             data.get('logout_request'),

                             data.get('request_id'),

-                            data.get('expiration_time'),

                             data.get('supported_logout_mechs'))

  

      def add_session(self, session_id, provider_id, user, login_session,
@@ -145,25 +139,21 @@ 

          """

          self.user = user

  

-         timeout = cherrypy_config['tools.sessions.timeout']

-         t = datetime.timedelta(seconds=timeout * 60)

-         expiration_time = datetime.datetime.now() + t

+         ttl = cherrypy_config['tools.sessions.timeout'] * 60

  

          data = {'session_id': session_id,

                  'provider_id': provider_id,

                  'user': user,

                  'login_session': login_session,

                  'logoutstate': LOGGED_IN,

-                 'expiration_time': expiration_time,

                  'request_id': request_id,

                  'supported_logout_mechs': supported_logout_mechs}

  

-         uuidval = self._ss.new_session(data)

+         uuidval = self._ss.new_session(data, ttl)

  

          return SAMLSession(uuidval, session_id, provider_id, user,

                             login_session, LOGGED_IN,

-                            request_id=request_id,

-                            expiration_time=expiration_time)

+                            request_id=request_id)

  

      def get_session_by_id(self, session_id):

          """

file modified
+66 -32
@@ -172,7 +172,7 @@ 

          return self._con.execute(select(self._columns(columns),

                                          self._where(kvfilter)))

  

-     def insert(self, values):

+     def insert(self, values, ttl=None):

          self._con.execute(self._table.insert(values))

  

      def update(self, values, kvfilter):
@@ -181,6 +181,15 @@ 

      def delete(self, kvfilter):

          self._con.execute(self._table.delete(self._where(kvfilter)))

  

+     def perform_auto_cleanup(self):

+         table = self._table

+         sel = select([table.c.uuid]). \

+             where(and_(table.c.name == 'expiration_time',

+                        table.c.value <= str(datetime.datetime.now())))

+         # pylint: disable=no-value-for-parameter

+         d = table.delete().where(table.c.uuid.in_(sel))

+         return d.execute().rowcount

+ 
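perform_auto_cleanup() removes every row group whose stored expiration_time value sorts at or before the current time: it selects the matching uuids and then deletes all rows sharing those uuids. A standalone sketch of the same delete-by-subselect pattern, written in the SQLAlchemy 1.x style used in this diff (the 'data' table and its sample rows are illustrative; the real code runs against the UNIQUE_DATA_TABLE schema):

    import datetime
    from sqlalchemy import (MetaData, Table, Column, Text, and_, select,
                            create_engine)

    engine = create_engine('sqlite://')
    meta = MetaData()
    data = Table('data', meta,
                 Column('uuid', Text), Column('name', Text),
                 Column('value', Text))
    meta.create_all(engine)

    with engine.connect() as conn:
        # One already-expired row group to clean up.
        past = str(datetime.datetime.now() - datetime.timedelta(hours=1))
        conn.execute(data.insert(), [
            {'uuid': 'abc', 'name': 'user', 'value': 'alice'},
            {'uuid': 'abc', 'name': 'expiration_time', 'value': past}])

        # Select the uuids whose expiration_time has passed ...
        expired = select([data.c.uuid]).where(
            and_(data.c.name == 'expiration_time',
                 data.c.value <= str(datetime.datetime.now())))
        # ... and delete every row sharing one of those uuids.
        result = conn.execute(data.delete().where(data.c.uuid.in_(expired)))
        print('removed %d rows' % result.rowcount)   # -> removed 2 rows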

  

  class FileStore(BaseStore):

  
@@ -297,7 +306,7 @@ 

                                                   repr(res)))

          return res

  

-     def insert(self, values):

+     def insert(self, values, ttl=None):

          raise NotImplementedError

  

      def update(self, values, kvfilter):
@@ -306,6 +315,9 @@ 

      def delete(self, kvfilter):

          raise NotImplementedError

  

+     def perform_auto_cleanup(self):

+         raise NotImplementedError

+ 

  

  class Store(Log):

      # Static, Store-level variables
@@ -313,8 +325,12 @@ 

      __cleanups = {}

  

      # Static, class-level variables

-     # Either set this to False, or implement _cleanup, in child classes

+     # Either set this to False, or implement cleanup

+     # The two methods for cleanup are:

+     # - Implement a method _cleanup in the child class

+     # - Set _auto_cleanup_tables to a list of UNIQUE_DATA tables

      _should_cleanup = True

+     _auto_cleanup_tables = []

  

      def __init__(self, config_name=None, database_url=None):

          if config_name is None and database_url is None:
@@ -373,6 +389,11 @@ 

                                 'removed_entries': -1})

  

              # Cleanup has been long enough ago, let's run

+             self.debug('Starting autoclean for %s' % self.__class__.__name__)

+             auto_removed_entries = self._auto_cleanup()

+             self.debug('Auto-cleaned up %i entries for %s' %

+                        (auto_removed_entries, self.__class__.__name__))

+ 

              self.debug('Cleaning up for %s' % self.__class__.__name__)

              removed_entries = self._cleanup()

              self.debug('Cleaned up %i entries for %s' %
@@ -382,12 +403,24 @@ 

                                {'timestamp': int(time.time()),

                                 'removed_entries': removed_entries})

  

+     def _auto_cleanup(self):

+         # This function runs an automated cleanup for all subclasses that have

+         # set _auto_cleanup_tables. This requires that the tables mentioned

+         # use the standard UNIQUE_DATA_TABLE system, and they specify either an

+         # expiration_time or a ttl to new_unique_data.

+         cleaned = 0

+         for table in self._auto_cleanup_tables:

+             self.debug('Auto-cleaning %s' % table)

+             q = self._query(self._db, table, UNIQUE_DATA_TABLE)

+             cleaned_table = q.perform_auto_cleanup()

+             self.debug('Cleaned up %i entries' % cleaned_table)

+             cleaned += cleaned_table

+         return cleaned

+ 

      def _cleanup(self):

          # The default cleanup is to do nothing

          # This function should return the number of rows it cleaned up.

          # This information may be used to automatically tune the clean period.

-         self.error('Cleanup for %s not implemented' %

-                    self.__class__.__name__)

          return 0
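With the pieces above, a subclass no longer needs its own _cleanup: it lists its UNIQUE_DATA tables in _auto_cleanup_tables and writes rows with a ttl (or expiration_time), as OpenIDStore earlier in this PR and TranStore/SAML2SessionStore below do. A minimal sketch of the opt-in pattern (WidgetStore and the 'widgets' table are made-up names for illustration):

    class WidgetStore(Store):

        # Tables listed here must use the UNIQUE_DATA_TABLE layout.
        _auto_cleanup_tables = ['widgets']

        def __init__(self, database_url):
            super(WidgetStore, self).__init__(database_url=database_url)

        def add_widget(self, datum):
            # The ttl (in seconds) makes new_unique_data store an extra
            # expiration_time row, which _auto_cleanup purges once it passes.
            return self.new_unique_data('widgets', datum, ttl=300)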

  

      def _code_schema_version(self):
@@ -587,13 +620,22 @@ 

              self.error("Failed to delete from %s: [%s]" % (table, e))

              raise

  

-     def new_unique_data(self, table, data):

+     def new_unique_data(self, table, data, ttl=None, expiration_time=None):

+         if expiration_time:

+             ttl = expiration_time - int(time.time())

+         elif ttl:

+             expiration_time = int(time.time()) + ttl

+         if ttl and ttl < 0:

+             raise ValueError('Negative TTL specified: %s' % ttl)

+ 

          newid = str(uuid.uuid4())

          q = None

          try:

              q = self._query(self._db, table, UNIQUE_DATA_TABLE)

              for name in data:

-                 q.insert((newid, name, data[name]))

+                 q.insert((newid, name, data[name]), ttl)

+             if expiration_time:

+                 q.insert((newid, 'expiration_time', expiration_time), ttl)

              q.commit()

          except Exception, e:  # pylint: disable=broad-except

              if q:
@@ -612,7 +654,14 @@ 

              kvfilter['value'] = value

          return self._load_data(table, UNIQUE_DATA_TABLE, kvfilter)

  

-     def save_unique_data(self, table, data):

+     def save_unique_data(self, table, data, ttl=None, expiration_time=None):

+         if expiration_time:

+             ttl = expiration_time - int(time.time())

+         elif ttl:

+             expiration_time = int(time.time()) + ttl

+         if ttl and ttl < 0:

+             raise ValueError('Negative TTL specified: %s' % ttl)

+ 

          q = None

          try:

              q = self._query(self._db, table, UNIQUE_DATA_TABLE)
@@ -623,6 +672,8 @@ 

                      curvals[r[0]] = r[1]

  

                  datum = data[uid]

+                 if expiration_time:

+                     datum['expiration_time'] = expiration_time

                  for name in datum:

                      if name in curvals:

                          if datum[name] is None:
@@ -632,7 +683,7 @@ 

                                       {'uuid': uid, 'name': name})

                      else:

                          if datum[name] is not None:

-                             q.insert((uid, name, datum[name]))

+                             q.insert((uid, name, datum[name]), ttl)

  

              q.commit()

          except Exception, e:  # pylint: disable=broad-except
@@ -844,6 +895,8 @@ 

  

  class TranStore(Store):

  

+     _auto_cleanup_tables = ['transactions']

+ 

      def __init__(self, path=None):

          super(TranStore, self).__init__('transactions.db')

          self.table = 'transactions'
@@ -869,20 +922,11 @@ 

          else:

              raise NotImplementedError()

  

-     def _cleanup(self):

-         # pylint: disable=protected-access

-         table = SqlQuery(self._db, self.table, UNIQUE_DATA_TABLE)._table

-         in_one_hour = datetime.datetime.now() - datetime.timedelta(hours=1)

-         sel = select([table.c.uuid]). \

-             where(and_(table.c.name == 'origintime',

-                        table.c.value <= str(in_one_hour)))

-         # pylint: disable=no-value-for-parameter

-         d = table.delete().where(table.c.uuid.in_(sel))

-         return d.execute().rowcount

- 

  

  class SAML2SessionStore(Store):

  

+     _auto_cleanup_tables = ['saml2_sessions']

+ 

      def __init__(self, database_url):

          super(SAML2SessionStore, self).__init__(database_url=database_url)

          self.table = 'saml2_sessions'
@@ -906,20 +950,10 @@ 

              raise ValueError("Multiple entries returned")

          return data.keys()[0]

  

-     def _cleanup(self):

-         # pylint: disable=protected-access

-         table = SqlQuery(self._db, self.table, UNIQUE_DATA_TABLE)._table

-         sel = select([table.c.uuid]). \

-             where(and_(table.c.name == 'expiration_time',

-                        table.c.value <= str(datetime.datetime.now())))

-         # pylint: disable=no-value-for-parameter

-         d = table.delete().where(table.c.uuid.in_(sel))

-         return d.execute().rowcount

- 

      def get_data(self, idval=None, name=None, value=None):

          return self.get_unique_data(self.table, idval, name, value)

  

-     def new_session(self, datum):

+     def new_session(self, datum, ttl):

          if 'supported_logout_mechs' in datum:

              datum['supported_logout_mechs'] = ','.join(

                  datum['supported_logout_mechs']
@@ -927,7 +961,7 @@ 

          for attr in datum:

              if isinstance(datum[attr], str):

                  datum[attr] = unicode(datum[attr], 'utf-8')

-         return self.new_unique_data(self.table, datum)

+         return self.new_unique_data(self.table, datum, ttl)

  

      def get_session(self, session_id=None, request_id=None):

          if session_id:

file modified
+6 -3
@@ -9,6 +9,8 @@ 

  TRANSTABLE = 'transactions'

  TRANSID = "ipsilon_transaction_id"

  

+ SESSION_DURATION = 3600

+ 
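SESSION_DURATION keeps the same one-hour retention window that the removed TranStore._cleanup enforced through the 'origintime' column, now expressed as a TTL in seconds when the transaction data is written:

    # The TranStore._cleanup removed above purged transactions whose
    # 'origintime' was more than an hour old; SESSION_DURATION preserves
    # that window as a TTL.
    SESSION_DURATION = 3600
    assert SESSION_DURATION == 60 * 60   # one hour, in seconds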

  

  class Transaction(Log):

  
@@ -45,7 +47,8 @@ 

      def create_tid(self):

          data = {'provider': self.provider,

                  'origintime': str(datetime.now())}

-         self.transaction_id = self._ts.new_unique_data(TRANSTABLE, data)

+         self.transaction_id = self._ts.new_unique_data(TRANSTABLE, data,

+                                                        ttl=SESSION_DURATION)

          self._set_cookie()

          self.debug('Transaction: %s %s' % (self.provider,

                                             self.transaction_id))
@@ -55,7 +58,7 @@ 

          self.cookie.send()

          cookiedata = {'cookie': self.cookie.name}

          data = {self.transaction_id: cookiedata}

-         self._ts.save_unique_data(TRANSTABLE, data)

+         self._ts.save_unique_data(TRANSTABLE, data, ttl=SESSION_DURATION)

  

      def _get_cookie(self, data=None):

          if data is None:
@@ -80,7 +83,7 @@ 

  

      def store(self, data):

          savedata = {self.transaction_id: data}

-         self._ts.save_unique_data(TRANSTABLE, savedata)

+         self._ts.save_unique_data(TRANSTABLE, savedata, ttl=SESSION_DURATION)

  

      def retrieve(self):

          data = self._ts.get_unique_data(TRANSTABLE,

@@ -18,6 +18,6 @@ 

  tools.sessions.storage_type = "${sesstype}"

  tools.sessions.storage_${sessopt} = "${sessval}"

  tools.sessions.path = "${instanceurl}"

- tools.sessions.timeout = 60

+ tools.sessions.timeout = ${session_timeout}

  tools.sessions.httponly = ${secure}

  tools.sessions.secure = ${secure}
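Units differ along the way: --session-timeout and tools.sessions.timeout are in minutes, while the SAML2 session store takes a TTL in seconds (ttl = cherrypy_config['tools.sessions.timeout'] * 60 in the session handling changes above). A quick sketch of the conversion with the default value:

    session_timeout = 30                      # --session-timeout, in minutes
    tools_sessions_timeout = session_timeout  # rendered into the CherryPy config
    ttl = tools_sessions_timeout * 60         # seconds passed to new_session()
    assert ttl == 1800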

file added
+199
@@ -0,0 +1,199 @@ 

+ #!/usr/bin/python

+ #

+ # Copyright (C) 2014 Ipsilon project Contributors, for license see COPYING

+ 

+ from helpers.common import IpsilonTestBase  # pylint: disable=relative-import

+ from helpers.http import HttpSessions  # pylint: disable=relative-import

+ import os

+ import pwd

+ import sys

+ import sqlite3

+ from string import Template

+ import time

+ 

+ idp_g = {'TEMPLATES': '${TESTDIR}/templates/install',

+          'CONFDIR': '${TESTDIR}/etc',

+          'DATADIR': '${TESTDIR}/lib',

+          'CACHEDIR': '${TESTDIR}/cache',

+          'HTTPDCONFD': '${TESTDIR}/${NAME}/conf.d',

+          'STATICDIR': '${ROOTDIR}',

+          'BINDIR': '${ROOTDIR}/ipsilon',

+          'WSGI_SOCKET_PREFIX': '${TESTDIR}/${NAME}/logs/wsgi'}

+ 

+ 

+ idp_a = {'hostname': '${ADDRESS}:${PORT}',

+          'admin_user': '${TEST_USER}',

+          'system_user': '${TEST_USER}',

+          'instance': '${NAME}',

+          'testauth': 'yes',

+          'pam': 'no',

+          'gssapi': 'no',

+          'ipa': 'no',

+          'cleanup_interval': 1,

+          'session_timeout': 0.1,

+          'server_debugging': 'True'}

+ 

+ 

+ sp_g = {'HTTPDCONFD': '${TESTDIR}/${NAME}/conf.d',

+         'SAML2_TEMPLATE': '${TESTDIR}/templates/install/saml2/sp.conf',

+         'CONFFILE': '${TESTDIR}/${NAME}/conf.d/ipsilon-%s.conf',

+         'HTTPDIR': '${TESTDIR}/${NAME}/%s'}

+ 

+ 

+ sp_a = {'hostname': '${ADDRESS}',

+         'saml_idp_metadata': 'https://127.0.0.10:45080/idp1/saml2/metadata',

+         'saml_auth': '/sp',

+         'httpd_user': '${TEST_USER}'}

+ 

+ 

+ def fixup_sp_httpd(httpdir):

+     location = """

+ 

+ Alias /sp ${HTTPDIR}/sp

+ 

+ <Directory ${HTTPDIR}/sp>

+     <IfModule mod_authz_core.c>

+         Require all granted

+     </IfModule>

+     <IfModule !mod_authz_core.c>

+         Order Allow,Deny

+         Allow from All

+     </IfModule>

+ </Directory>

+ """

+     index = """WORKS!"""

+ 

+     t = Template(location)

+     text = t.substitute({'HTTPDIR': httpdir})

+     with open(httpdir + '/conf.d/ipsilon-saml.conf', 'a') as f:

+         f.write(text)

+ 

+     os.mkdir(httpdir + '/sp')

+     with open(httpdir + '/sp/index.html', 'w') as f:

+         f.write(index)

+ 

+ 

+ class IpsilonTest(IpsilonTestBase):

+ 

+     def __init__(self):

+         super(IpsilonTest, self).__init__('testcleanup', __file__)

+ 

+     def setup_servers(self, env=None):

+         print "Installing IDP server"

+         name = 'idp1'

+         addr = '127.0.0.10'

+         port = '45080'

+         idp = self.generate_profile(idp_g, idp_a, name, addr, port)

+         conf = self.setup_idp_server(idp, name, addr, port, env)

+ 

+         print "Starting IDP's httpd server"

+         self.start_http_server(conf, env)

+ 

+         print "Installing SP server"

+         name = 'sp1'

+         addr = '127.0.0.11'

+         port = '45081'

+         sp = self.generate_profile(sp_g, sp_a, name, addr, port)

+         conf = self.setup_sp_server(sp, name, addr, port, env)

+         fixup_sp_httpd(os.path.dirname(conf))

+ 

+         print "Starting first SP's httpd server"

+         self.start_http_server(conf, env)

+ 

+ 

+ if __name__ == '__main__':

+ 

+     idpname = 'idp1'

+     sp1name = 'sp1'

+     user = pwd.getpwuid(os.getuid())[0]

+ 

+     sess = HttpSessions()

+     sess.add_server(idpname, 'https://127.0.0.10:45080', user, 'ipsilon')

+     sess.add_server(sp1name, 'https://127.0.0.11:45081')

+ 

+     print "testcleanup: Verify logged out state ...",

+     try:

+         page = sess.fetch_page(idpname, 'https://127.0.0.10:45080/idp1/')

+         page.expected_value('//div[@id="content"]/p/a/text()', 'Log In')

+     except Exception, e:  # pylint: disable=broad-except

+         print >> sys.stderr, " ERROR: %s" % repr(e)

+         sys.exit(1)

+     print " SUCCESS"

+ 

+     print "testcleanup: Authenticate to IDP ...",

+     try:

+         sess.auth_to_idp(idpname)

+     except Exception, e:  # pylint: disable=broad-except

+         print >> sys.stderr, " ERROR: %s" % repr(e)

+         sys.exit(1)

+     print " SUCCESS"

+ 

+     print "testcleanup: Add SP Metadata to IDP ...",

+     try:

+         sess.add_sp_metadata(idpname, sp1name)

+     except Exception, e:  # pylint: disable=broad-except

+         print >> sys.stderr, " ERROR: %s" % repr(e)

+         sys.exit(1)

+     print " SUCCESS"

+ 

+     print "testcleanup: Access first SP Protected Area ...",

+     try:

+         page = sess.fetch_page(idpname, 'https://127.0.0.11:45081/sp/')

+         page.expected_value('text()', 'WORKS!')

+     except ValueError, e:

+         print >> sys.stderr, " ERROR: %s" % repr(e)

+         sys.exit(1)

+     print " SUCCESS"

+ 

+     print "testcleanup: Verify logged in state ...",

+     try:

+         page = sess.fetch_page(idpname, 'https://127.0.0.10:45080/idp1/')

+         page.expected_value('//div[@id="content"]/p/a/text()', None)

+     except Exception, e:  # pylint: disable=broad-except

+         print >> sys.stderr, " ERROR: %s" % repr(e)

+         sys.exit(1)

+     print " SUCCESS"

+ 

+     print "testcleanup: Checking that SAML2 sessions were created ...",

+     try:

+         sess_db = os.path.join(os.environ['TESTDIR'],

+                                'lib/idp1/saml2.sessions.db.sqlite')

+         conn = sqlite3.connect(sess_db)

+         cur = conn.cursor()

+         cur.execute('SELECT * FROM saml2_sessions;')

+         if len(cur.fetchall()) == 0:

+             raise ValueError('SAML2 sessions not created')

+         conn.close()

+     except ValueError, e:

+         print >> sys.stderr, " ERROR: %s" % repr(e)

+         sys.exit(1)

+     print " SUCCESS"

+ 

+     # Sessions are valid for six seconds, and we clean up once per minute.

+     # However, checking after a minute is kinda cutting it close, so we add ten

+     # seconds to make sure the system has had time to clean up.

+     print "Waiting a minute for cleanup to happen ..."

+     time.sleep(70)

+ 

+     print "testcleanup: Verify logged out state ...",

+     try:

+         page = sess.fetch_page(idpname, 'https://127.0.0.10:45080/idp1/')

+         page.expected_value('//div[@id="content"]/p/a/text()', 'Log In')

+     except Exception, e:  # pylint: disable=broad-except

+         print >> sys.stderr, " ERROR: %s" % repr(e)

+         sys.exit(1)

+     print " SUCCESS"

+ 

+     print "testcleanup: Checking that SAML2 sessions were destroyed ...",

+     try:

+         sess_db = os.path.join(os.environ['TESTDIR'],

+                                'lib/idp1/saml2.sessions.db.sqlite')

+         conn = sqlite3.connect(sess_db)

+         cur = conn.cursor()

+         cur.execute('SELECT * FROM saml2_sessions;')

+         if len(cur.fetchall()) != 0:

+             raise ValueError('SAML2 sessions left behind: %s' % cur.fetchall())

+     except ValueError, e:

+         print >> sys.stderr, " ERROR: %s" % repr(e)

+         sys.exit(1)

+     print " SUCCESS"
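The new test is hooked into the default 'test' target at the top of this PR and can be run on its own with the same command (with $(TESTDIR) pointing at a scratch test directory):

    PYTHONPATH=./ ./tests/tests.py --path=$(TESTDIR) --test=testcleanup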


Looks good here. :+1:, especially with the test case(s).

Commit e192c88 fixes this pull-request

Commit 1d06a2f fixes this pull-request

Commit 1275d9f fixes this pull-request

Commit 188cdf5 fixes this pull-request

Commit 1717efc fixes this pull-request

Pull-Request has been merged by puiterwijk@redhat.com

7 years ago