#490 Create db session per handler
Closed 4 years ago by qwan. Opened 4 years ago by qwan.
qwan/freshmaker db-session-per-handler  into  master

file modified
+2 -1
@@ -237,7 +237,8 @@ 

      LOG_LEVEL = 'debug'

      DEBUG = True

  

-     SQLALCHEMY_DATABASE_URI = 'sqlite://'

+     SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format(os.path.join(

+         dbdir, 'freshmaker_test.db'))
qwan commented 4 years ago

The in-memory SQLite database doesn't work after this change.

  

      MESSAGING = 'in_memory'

      MESSAGING_SENDER = 'in_memory'

file modified
+7 -7
@@ -31,7 +31,7 @@ 

  from flask_login import login_required as _login_required

  from werkzeug.exceptions import Unauthorized

  

- from freshmaker import conf, log

+ from freshmaker import conf, log, db

  from freshmaker.errors import Forbidden

  from freshmaker.models import User, commit_on_success

  
@@ -70,9 +70,9 @@ 

  

      username, realm = remote_user.split('@')

  

-     user = User.find_user_by_name(username)

+     user = User.find_user_by_name(db.session, username)

      if not user:

-         user = User.create_user(username=username)

+         user = User.create_user(db.session, username=username)

  

      try:

          groups = query_ldap_groups(username)
@@ -102,9 +102,9 @@ 

      if not username:

          raise Unauthorized('Unable to get user information (DN) from client certificate')

  

-     user = User.find_user_by_name(username)

+     user = User.find_user_by_name(db.session, username)

      if not user:

-         user = User.create_user(username=username)

+         user = User.create_user(db.session, username=username)

  

      g.groups = []

      g.user = user
@@ -169,9 +169,9 @@ 

  

      user_info = get_user_info(token)

  

-     user = User.find_user_by_name(username)

+     user = User.find_user_by_name(db.session, username)

      if not user:

-         user = User.create_user(username=username)

+         user = User.create_user(db.session, username=username)

  

      g.groups = user_info.get('groups', [])

      g.user = user

file modified
+41 -15
@@ -25,13 +25,16 @@ 

  import json

  import re

  import copy

+ import sqlalchemy

  from functools import wraps

+ from sqlalchemy.orm import scoped_session, sessionmaker

  

- from freshmaker import conf, log, db, models

+ from freshmaker import conf, log, models

  from freshmaker.kojiservice import koji_service, parse_NVR

  from freshmaker.models import ArtifactBuildState

  from freshmaker.types import EventState

  from freshmaker.models import ArtifactBuild, Event

+ from freshmaker.monitor import db_hook_event_listeners

  from freshmaker.utils import krb_context, get_rebuilt_nvr

  from freshmaker.errors import UnprocessableEntity, ProgrammingError

  from freshmaker.odcsclient import create_odcs_client, FreshmakerODCSClient
@@ -71,17 +74,19 @@ 

  

              # In case the exception interrupted the database transaction,

              # rollback it.

-             db.session.rollback()

+             handler.db_session.rollback()

  

              # Mark the event as failed.

              db_event_id = handler.current_db_event_id

-             db_event = db.session.query(Event).filter_by(

+             db_event = handler.db_session.query(Event).filter_by(

                  id=db_event_id).first()

              if db_event:

                  msg = "Handling of event failed with traceback: %s" % (str(e))

-                 db_event.transition(EventState.FAILED, msg)

-                 db_event.builds_transition(ArtifactBuildState.FAILED.value, msg)

-                 db.session.commit()

+                 db_event.transition(handler.db_session, EventState.FAILED, msg)

+                 db_event.builds_transition(

+                     handler.db_session, ArtifactBuildState.FAILED.value, msg

+                 )

+                 handler.db_session.commit()

              raise

      return decorator

  
@@ -119,17 +124,18 @@ 

  

                  # In case the exception interrupted the database transaction,

                  # rollback it.

-                 db.session.rollback()

+                 handler.db_session.rollback()

  

                  # Mark the event as failed.

                  build_id = handler.current_db_artifact_build_id

-                 build = db.session.query(ArtifactBuild).filter_by(

+                 build = handler.db_session.query(ArtifactBuild).filter_by(

                      id=build_id).first()

                  if build:

                      build.transition(

+                         handler.db_session,

                          ArtifactBuildState.FAILED.value, "Handling of "

                          "build failed with traceback: %s" % (str(e)))

-                     db.session.commit()

+                     handler.db_session.commit()

                  raise

          return decorator

      return wrapper
@@ -147,6 +153,10 @@ 

      order = 50

  

      def __init__(self):

+         # Create per-handler db session instead of using the global 'db.session',

+         # this is to avoid caching lots of records in the global 'db.session' object

+         self.db_session = self._create_db_session()
cqi commented 4 years ago

+ # Create per-handler db session instead of using the global 'db.session',
+ # this is to avoid caching lots of records in the global 'db.session' object
+ self.db_session = self._create_db_session()

This change does not help achieve the goal of avoiding caching lots of records.

With this change, every handler will have its own db session object, and all the resources that are not released explicitly will remain in that object. Meanwhile, when processing an event, a handler object is initialized and can_handle is called to determine whether the event can be handled by that handler. That means the db session object is created even if it is never used.

I would suggest another idea to implement per-handler db session, FYI.

The core concepts used here are

  • SQLAlchemy scoped_session is a thread-local session[1].
  • Session.remove is used to release resources managed by a session object.

Implementation steps:

  1. Call scoped_session to create a global db session object.
  2. Modify BaseHandler.handle to accept the db session object as an argument.
  3. Pass the db session object to BaseHandler.handle in FreshmakerConsumer.process_event.
  4. After the call of BaseHandler.handle, whether it succeeds or fails, call Session.remove to release all the resources explicitly.

A pseudo-code could be:

engine_opts = {
    "configuration": {"sqlalchemy.url": conf.sqlalchemy_database_uri},
}
engine = sqlalchemy.engine_from_config(**engine_opts)
session_factory = sessionmaker(bind=engine)
db_session = scoped_session(session_factory)

try:
    further_work = handler.handle(msg, db_session) or []
except:
    # handle the exception
finally:
    db_session.remove()

Two major benefits are:

  • Only one SQLAlchemy database session is created per-handler.
  • All the resources managed by the session, as well as the session object itself, can be released and removed from memory as early as possible, right after the BaseHandler.handle call.

[1] https://docs.sqlalchemy.org/en/13/orm/contextual.html#unitofwork-contextual

+ 

          self._db_event_id = None

          self._db_artifact_build_id = None

          self._log_prefix = ""
@@ -217,12 +227,23 @@ 

  

      @property

      def current_db_event(self):

-         return db.session.query(Event).filter_by(id=self.current_db_event_id).first()

+         return self.db_session.query(Event).filter_by(id=self.current_db_event_id).first()

  

      @property

      def current_db_artifact_build_id(self):

          return self._db_artifact_build_id

  

+     def _create_db_session(self):

+         engine_opts = {

+             "configuration": {"sqlalchemy.url": conf.sqlalchemy_database_uri},

+         }

+         engine = sqlalchemy.engine_from_config(**engine_opts)

+         session_factory = sessionmaker(bind=engine)

+         db_session = scoped_session(session_factory)

+         db_hook_event_listeners(db_session.bind.engine)

+ 

+         return db_session

+ 

      def set_context(self, db_object):

          """

          Sets the current context of a handler. This method accepts models.Event
@@ -292,14 +313,14 @@ 

              ev = event

          else:

              ev = models.Event.get_or_create(

-                 db.session, event.msg_id, event.search_key, event.__class__)

-         build = models.ArtifactBuild.create(db.session, ev, name,

+                 self.db_session, event.msg_id, event.search_key, event.__class__)

+         build = models.ArtifactBuild.create(self.db_session, ev, name,

                                              artifact_type.name.lower(),

                                              build_id, dep_on, state,

                                              original_nvr, rebuilt_nvr,

                                              rebuild_reason)

  

-         db.session.commit()

+         self.db_session.commit()

          return build

  

      def _match_allow_build_rule(self, criteria, rule):
@@ -483,12 +504,14 @@ 

          """

          if build.state != ArtifactBuildState.PLANNED.value:

              build.transition(

+                 self.db_session,

                  ArtifactBuildState.FAILED.value,

                  "Container image build is not in PLANNED state.")

              return

  

          if not build.build_args:

              build.transition(

+                 self.db_session,

                  ArtifactBuildState.FAILED.value,

                  "Container image does not have 'build_args' filled in.")

              return
@@ -519,6 +542,7 @@ 

  

          if not build.rebuilt_nvr:

              build.transition(

+                 self.db_session,

                  ArtifactBuildState.FAILED.value,

                  "Container image does not have rebuilt_nvr set.")

              return
@@ -606,13 +630,15 @@ 

                  return

              if build.build_id:

                  build.transition(

+                     self.db_session,

                      ArtifactBuildState.BUILD.value,

                      "Building container image in Koji.")

              else:

                  build.transition(

+                     self.db_session,

                      ArtifactBuildState.FAILED.value,

                      "Error while building container image in Koji.")

-             db.session.add(build)

-             db.session.commit()

+             self.db_session.add(build)

+             self.db_session.commit()

  

          list(map(build_image, builds))

@@ -23,7 +23,7 @@ 

  

  import requests

  

- from freshmaker import conf, db

+ from freshmaker import conf

  from freshmaker.models import Event

  from freshmaker.errata import Errata

  from freshmaker.pulp import Pulp
@@ -52,15 +52,15 @@ 

          if event.dry_run:

              self.force_dry_run()

  

-         db_event = Event.get_or_create_from_event(db.session, event)

+         db_event = Event.get_or_create_from_event(self.db_session, event)

          self.set_context(db_event)

  

          # Check if we are allowed to build this advisory.

          if not event.is_allowed(self):

              msg = ("Errata advisory {0} is not allowed by internal policy "

                     "to trigger Bob rebuilds.".format(event.advisory.errata_id))

-             db_event.transition(EventState.SKIPPED, msg)

-             db.session.commit()

+             db_event.transition(self.db_session, EventState.SKIPPED, msg)

+             self.db_session.commit()

              self.log_info(msg)

              return []

  
@@ -79,8 +79,8 @@ 

          if not repo_tags:

              msg = "No CDN repo found for advisory %r" % errata_id

              self.log_info(msg)

-             db_event.transition(EventState.FAILED, msg)

-             db.session.commit()

+             db_event.transition(self.db_session, EventState.FAILED, msg)

+             self.db_session.commit()

              return

  

          # Use the Pulp to get the Docker repository name from the CDN repository
@@ -137,5 +137,5 @@ 

          if num_impacted is not None:

              msg += " Bob is rebuilding %d impacted external image repositories." % (

                  num_impacted)

-         db_event.transition(EventState.COMPLETE, msg)

-         db.session.commit()

+         db_event.transition(self.db_session, EventState.COMPLETE, msg)

+         self.db_session.commit()

@@ -21,7 +21,7 @@ 

  #

  # Written by Filip Valder <fvalder@redhat.com>

  

- from freshmaker import conf, log, db

+ from freshmaker import conf, log

  from freshmaker.models import ArtifactBuild

  from freshmaker.handlers import BaseHandler

  from freshmaker.events import FreshmakerManageEvent
@@ -51,7 +51,7 @@ 

          log_fail = log.error if event.last_try else log.warning

          with koji_service(

                  conf.koji_profile, log, dry_run=event.dry_run) as session:

-             builds = db.session.query(ArtifactBuild).filter(

+             builds = self.db_session.query(ArtifactBuild).filter(

                  ArtifactBuild.id.in_(event.body['builds_id'])).all()

              for build in builds:

                  if session.cancel_build(build.build_id):
@@ -61,7 +61,7 @@ 

                      build.state_reason = ('Build was NOT canceled in external build system.'

                                            ' Max number of tries reached!')

                  failed_to_cancel_builds_id.append(build.id)

-             db.session.commit()

+             self.db_session.commit()

  

          if failed_to_cancel_builds_id:

              log_fail("Builds which failed to cancel in external build system,"

@@ -21,7 +21,7 @@ 

  #

  # Written by Chenxiong Qi <cqi@redhat.com>

  

- from freshmaker import db, log

+ from freshmaker import log

  from freshmaker.events import BrewSignRPMEvent, ErrataAdvisoryRPMsSignedEvent

  from freshmaker.handlers import BaseHandler

  from freshmaker.errata import Errata
@@ -53,7 +53,7 @@ 

          """

          ret = []

          for advisory in advisories:

-             if (db.session.query(Event).filter_by(

+             if (self.db_session.query(Event).filter_by(

                      search_key=str(advisory.errata_id)).count() != 0):

                  log.info("Skipping advisory %s (%d), already handled by "

                           "Freshmaker", advisory.name, advisory.errata_id)
@@ -101,9 +101,9 @@ 

              new_event = ErrataAdvisoryRPMsSignedEvent(

                  event.msg_id + "." + str(advisory.name), advisory)

              db_event = Event.create(

-                 db.session, new_event.msg_id, new_event.search_key,

+                 self.db_session, new_event.msg_id, new_event.search_key,

                  new_event.__class__, released=False)

-             db.session.add(db_event)

+             self.db_session.add(db_event)

              new_events.append(new_event)

-         db.session.commit()

+         self.db_session.commit()

          return new_events

@@ -19,7 +19,7 @@ 

  # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE

  # SOFTWARE.

  

- from freshmaker import db, log

+ from freshmaker import log

  from freshmaker.events import (

      ErrataAdvisoryStateChangedEvent, ErrataAdvisoryRPMsSignedEvent)

  from freshmaker.models import Event, EVENT_TYPES
@@ -59,7 +59,7 @@ 

          is not included in further container images rebuilds.

          """

          # check db to see whether this advisory exists in db

-         db_event = db.session.query(Event).filter_by(

+         db_event = self.db_session.query(Event).filter_by(

              event_type_id=EVENT_TYPES[ErrataAdvisoryRPMsSignedEvent],

              search_key=str(errata_id)).first()

          if not db_event:
@@ -70,7 +70,7 @@ 

          self.set_context(db_event)

  

          db_event.released = True

-         db.session.commit()

+         self.db_session.commit()

          log.info("Errata advisory %d is now marked as released", errata_id)

  

      def rebuild_if_not_exists(self, event, errata_id):
@@ -82,7 +82,7 @@ 

          :return: List of extra events generated to initiate the rebuild.

          """

  

-         db_event = db.session.query(Event).filter_by(

+         db_event = self.db_session.query(Event).filter_by(

              event_type_id=EVENT_TYPES[ErrataAdvisoryRPMsSignedEvent],

              search_key=str(errata_id)).first()

          if (db_event and db_event.state != EventState.FAILED.value and

@@ -19,7 +19,6 @@ 

  # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE

  # SOFTWARE.

  

- from freshmaker import db

  from freshmaker.events import ODCSComposeStateChangeEvent

  from freshmaker.models import ArtifactBuild, ArtifactBuildCompose, Compose

  from freshmaker.handlers import BaseHandler, fail_event_on_handler_exception
@@ -44,7 +43,7 @@ 

      @fail_event_on_handler_exception

      def handle(self, event):

          # Get all the builds waiting for this compose.

-         builds_with_compose = db.session.query(ArtifactBuild).join(

+         builds_with_compose = self.db_session.query(ArtifactBuild).join(

              ArtifactBuildCompose).join(Compose)

          builds_with_compose = builds_with_compose.filter(

              Compose.odcs_compose_id == event.compose["id"],
@@ -52,5 +51,6 @@ 

  

          for build in builds_with_compose:

              build.transition(

+                 self.db_session,

                  ArtifactBuildState.FAILED.value,

                  "ODCS compose %r is in failed state." % event.compose["id"])

@@ -21,7 +21,6 @@ 

  #

  # Written by Chenxiong Qi <cqi@redhat.com>

  

- from freshmaker import db

  from freshmaker.models import (

      ArtifactBuild, ArtifactBuildState, Compose, ArtifactBuildCompose)

  from freshmaker.handlers import (
@@ -46,7 +45,7 @@ 

          if event.dry_run:

              self.force_dry_run()

  

-         builds_ready_to_rebuild = db.session.query(ArtifactBuild).join(

+         builds_ready_to_rebuild = self.db_session.query(ArtifactBuild).join(

              ArtifactBuildCompose).join(Compose)

          # Get all the builds waiting for this compose in PLANNED state ...

          builds_ready_to_rebuild = builds_ready_to_rebuild.filter(

@@ -24,7 +24,6 @@ 

  

  from freshmaker import conf

  from freshmaker import log

- from freshmaker import db

  from freshmaker.errata import Errata

  from freshmaker.events import (

      BrewContainerTaskStateChangeEvent, ErrataAdvisoryRPMsSignedEvent)
@@ -57,7 +56,7 @@ 

          build_id = event.task_id

  

          # check db to see whether this build exists in db

-         found_build = db.session.query(ArtifactBuild).filter_by(

+         found_build = self.db_session.query(ArtifactBuild).filter_by(

              type=ArtifactType.IMAGE.value,

              build_id=build_id

          ).first()
@@ -86,13 +85,15 @@ 

  

                  ret, msg = self._verify_advisory_rpms_in_container_build(errata_id, container_build_id)

                  if ret:

-                     found_build.transition(ArtifactBuildState.DONE.value, "Built successfully.")

+                     found_build.transition(

+                         self.db_session, ArtifactBuildState.DONE.value, "Built successfully.")

                  else:

-                     found_build.transition(ArtifactBuildState.FAILED.value, msg)

+                     found_build.transition(self.db_session, ArtifactBuildState.FAILED.value, msg)

  

              # for other builds, mark them as DONE

              else:

-                 found_build.transition(ArtifactBuildState.DONE.value, "Built successfully.")

+                 found_build.transition(

+                     self.db_session, ArtifactBuildState.DONE.value, "Built successfully.")

          if event.new_state == 'FAILED':

              args = json.loads(found_build.build_args)

              if "retry_count" not in args:
@@ -106,14 +107,16 @@ 

                  found_build.rebuilt_nvr = get_rebuilt_nvr(

                      found_build.type, found_build.original_nvr)

                  found_build.transition(

+                     self.db_session,

                      ArtifactBuildState.PLANNED.value,

                      "Retrying failed build %s" % (str(found_build.build_id)))

                  self.start_to_build_images([found_build])

              else:

                  found_build.transition(

+                     self.db_session,

                      ArtifactBuildState.FAILED.value,

                      "Failed to build in Koji.")

-         db.session.commit()

+         self.db_session.commit()

  

      @fail_artifact_build_on_handler_exception()

      def rebuild_dependent_containers(self, found_build):
@@ -121,7 +124,7 @@ 

          if found_build.state == ArtifactBuildState.DONE.value:

              # check db to see whether there is any planned image build

              # depends on this build

-             planned_builds = db.session.query(ArtifactBuild).filter_by(

+             planned_builds = self.db_session.query(ArtifactBuild).filter_by(

                  type=ArtifactType.IMAGE.value,

                  state=ArtifactBuildState.PLANNED.value,

                  dep_on=found_build
@@ -161,11 +164,13 @@ 

  

          if num_failed:

              db_event.transition(

+                 self.db_session,

                  EventState.COMPLETE,

                  'Advisory %s: %d of %d container image(s) failed to rebuild.' % (

                      db_event.search_key, num_failed, len(db_event.builds.all()),))

          else:

              db_event.transition(

+                 self.db_session,

                  EventState.COMPLETE,

                  'Advisory %s: All %s container images have been rebuilt.' % (

                      db_event.search_key, len(db_event.builds.all()),))

@@ -26,7 +26,7 @@ 

  import koji

  import kobo

  

- from freshmaker import conf, db

+ from freshmaker import conf

  from freshmaker.events import ErrataAdvisoryRPMsSignedEvent

  from freshmaker.events import ManualRebuildWithAdvisoryEvent

  from freshmaker.handlers import ContainerBuildHandler, fail_event_on_handler_exception
@@ -73,17 +73,17 @@ 

          # triggered by user, we want to track what happened

  

          db_event = Event.get_or_create(

-             db.session, event.msg_id, event.search_key, event.__class__,

+             self.db_session, event.msg_id, event.search_key, event.__class__,

              released=False, manual=event.manual)

-         db.session.commit()

+         self.db_session.commit()

          self.set_context(db_event)

  

          # Check if we are allowed to build this advisory.

          if not self.event.is_allowed(self):

              msg = ("Errata advisory {0} is not allowed by internal policy "

                     "to trigger rebuilds.".format(event.advisory.errata_id))

-             db_event.transition(EventState.SKIPPED, msg)

-             db.session.commit()

+             db_event.transition(self.db_session, EventState.SKIPPED, msg)

+             self.db_session.commit()

              self.log_info(msg)

              return []

  
@@ -95,16 +95,17 @@ 

          if not builds:

              msg = 'No container images to rebuild for advisory %r' % event.advisory.name

              self.log_info(msg)

-             db_event.transition(EventState.SKIPPED, msg)

-             db.session.commit()

+             db_event.transition(self.db_session, EventState.SKIPPED, msg)

+             self.db_session.commit()

              return []

  

          if all([build.state == ArtifactBuildState.FAILED.value

                  for build in builds.values()]):

              db_event.transition(

+                 self.db_session,

                  EventState.COMPLETE,

                  "No container images to rebuild, all are in failed state.")

-             db.session.commit()

+             self.db_session.commit()

              return []

  

          if event.advisory.state != 'SHIPPED_LIVE':
@@ -123,11 +124,11 @@ 

          # Log what we are going to rebuild

          self._check_images_to_rebuild(db_event, builds)

          self.start_to_build_images(

-             db_event.get_image_builds_in_first_batch(db.session))

+             db_event.get_image_builds_in_first_batch(self.db_session))

  

          msg = 'Advisory %s: Rebuilding %d container images.' % (

              db_event.search_key, len(db_event.builds.all()))

-         db_event.transition(EventState.BUILDING, msg)

+         db_event.transition(self.db_session, EventState.BUILDING, msg)

  

          return []

  
@@ -172,6 +173,7 @@ 

              # print error and stop the rebuild.

              if old_printed_count == len(printed):

                  db_event.builds_transition(

+                     self.db_session,

                      ArtifactBuildState.FAILED.value,

                      "No image to be built in batch %d." % (batch))

                  self.log_error("Dumping the builds:")
@@ -204,7 +206,7 @@ 

          :rtype: dict

          """

          db_event = Event.get_or_create(

-             db.session, event.msg_id, event.search_key, event.__class__)

+             self.db_session, event.msg_id, event.search_key, event.__class__)

  

          # Used as tmp dict with {brew_build_nvr: ArtifactBuild, ...} mapping.

          builds = builds or {}
@@ -225,7 +227,8 @@ 

                                     "it is already in db", nvr)

                      continue

  

-                 parent_build = db_event.get_artifact_build_from_event_dependencies(nvr)

+                 parent_build = db_event.get_artifact_build_from_event_dependencies(

+                     self.db_session, nvr)

                  if parent_build:

                      self.log_debug(

                          "Skipping recording build %s, "
@@ -238,7 +241,8 @@ 

                  dep_on = builds[parent_nvr] if parent_nvr in builds else None

  

                  if parent_nvr:

-                     build = db_event.get_artifact_build_from_event_dependencies(parent_nvr)

+                     build = db_event.get_artifact_build_from_event_dependencies(

+                         self.db_session, parent_nvr)

                      if build:

                          parent_nvr = build[0].rebuilt_nvr

                          dep_on = None
@@ -280,7 +284,7 @@ 

                  # in case of error.

                  self.set_context(build)

  

-                 build.transition(state, state_reason)

+                 build.transition(self.db_session, state, state_reason)

  

                  build.build_args = json.dumps({

                      "repository": image["repository"],
@@ -292,7 +296,7 @@ 

                      "renewed_odcs_compose_ids": image["odcs_compose_ids"],

                  })

  

-                 db.session.commit()

+                 self.db_session.commit()

  

                  if state != ArtifactBuildState.FAILED.value:

                      # Store odcs pulp compose to build.
@@ -313,15 +317,15 @@ 

  

                              if build.state != ArtifactBuildState.FAILED.value:

                                  db_compose = Compose(odcs_compose_id=compose['id'])

-                                 db.session.add(db_compose)

-                                 db.session.commit()

+                                 self.db_session.add(db_compose)

+                                 self.db_session.commit()

                                  odcs_cache[cache_key] = db_compose

                              else:

                                  db_compose = None

-                                 db.session.commit()

+                                 self.db_session.commit()

                          if db_compose:

-                             build.add_composes(db.session, [db_compose])

-                             db.session.commit()

+                             build.add_composes(self.db_session, [db_compose])

+                             self.db_session.commit()

  

                      # Unpublished images can contain unreleased RPMs, so generate

                      # the ODCS compose with all the RPMs in the image to allow
@@ -330,10 +334,10 @@ 

                          compose = self.odcs.prepare_odcs_compose_with_image_rpms(image)

                          if compose:

                              db_compose = Compose(odcs_compose_id=compose['id'])

-                             db.session.add(db_compose)

-                             db.session.commit()

-                             build.add_composes(db.session, [db_compose])

-                             db.session.commit()

+                             self.db_session.add(db_compose)

+                             self.db_session.commit()

+                             build.add_composes(self.db_session, [db_compose])

+                             self.db_session.commit()

  

                  builds[nvr] = build

  

file modified
+77 -67
@@ -110,22 +110,23 @@ 

      username = db.Column(db.String(200), nullable=False, unique=True)

  

      @classmethod

-     def find_user_by_name(cls, username):

+     def find_user_by_name(cls, db_session, username):

          """Find a user by username

  

          :param str username: a string of username to find user

+         :param db_session: SQLAlchemy database session object.

          :return: user object if found, otherwise None is returned.

          :rtype: User

          """

          try:

-             return db.session.query(cls).filter(cls.username == username)[0]

+             return db_session.query(cls).filter(cls.username == username)[0]

          except IndexError:

              return None

  

      @classmethod

-     def create_user(cls, username):

+     def create_user(cls, db_session, username):

          user = cls(username=username)

-         db.session.add(user)

+         db_session.add(user)

          return user

  

  
@@ -171,7 +172,7 @@ 

          doc='Whether this event is triggered manually')

  

      @classmethod

-     def create(cls, session, message_id, search_key, event_type, released=True,

+     def create(cls, db_session, message_id, search_key, event_type, released=True,

                 state=None, manual=False, dry_run=False, requester=None):

          if event_type in EVENT_TYPES:

              event_type = EVENT_TYPES[event_type]
@@ -187,7 +188,7 @@ 

              dry_run=dry_run,

              requester=requester,

          )

-         session.add(event)

+         db_session.add(event)

          return event

  

      @validates('state')
@@ -201,35 +202,35 @@ 

          raise ValueError("%s: %s, not in %r" % (key, field, list(EventState)))

  

      @classmethod

-     def get(cls, session, message_id):

-         return session.query(cls).filter_by(message_id=message_id).first()

+     def get(cls, db_session, message_id):

+         return db_session.query(cls).filter_by(message_id=message_id).first()

  

      @classmethod

-     def get_or_create(cls, session, message_id, search_key, event_type,

+     def get_or_create(cls, db_session, message_id, search_key, event_type,

                        released=True, manual=False, dry_run=False):

-         instance = cls.get(session, message_id)

+         instance = cls.get(db_session, message_id)

          if instance:

              return instance

          instance = cls.create(

-             session, message_id, search_key, event_type,

+             db_session, message_id, search_key, event_type,

              released=released, manual=manual, dry_run=dry_run)

-         session.commit()

+         db_session.commit()

          return instance

  

      @classmethod

-     def get_or_create_from_event(cls, session, event, released=True):

-         return cls.get_or_create(session, event.msg_id,

+     def get_or_create_from_event(cls, db_session, event, released=True):

+         return cls.get_or_create(db_session, event.msg_id,

                                   event.search_key, event.__class__,

                                   released=released, manual=event.manual,

                                   dry_run=event.dry_run)

  

      @classmethod

-     def get_unreleased(cls, session, states=None):

+     def get_unreleased(cls, db_session, states=None):

          """

          Returns list of all unreleased events in given states. If no states

          are provided, returns only events in INITIALIZED, BUILDING or COMPLETE

          state.

-         :param session: db.session

+         :param db_session: SQLAlchemy database session object.

          :param list states: List of states to filter events for. If None,

              INITIALIZED, BUILDING and COMPLETE is used.

          :rtype: list of models.Event.
@@ -244,15 +245,15 @@ 

                  state.value if isinstance(state, EventState) else state for

                  state in states

              ]

-         return session.query(cls).filter(cls.released == false(),

-                                          cls.state.in_(states)).all()

+         return db_session.query(cls).filter(cls.released == false(),

+                                             cls.state.in_(states)).all()

  

      @classmethod

-     def get_by_event_id(cls, session, event_id):

-         return session.query(cls).filter_by(id=event_id).first()

+     def get_by_event_id(cls, db_session, event_id):

+         return db_session.query(cls).filter_by(id=event_id).first()

  

-     def get_image_builds_in_first_batch(self, session):

-         return session.query(ArtifactBuild).filter_by(

+     def get_image_builds_in_first_batch(self, db_session):

+         return db_session.query(ArtifactBuild).filter_by(

              dep_on=None,

              type=ArtifactType.IMAGE.value,

              event_id=self.id,
@@ -262,58 +263,58 @@ 

      def event_type(self):

          return INVERSE_EVENT_TYPES[self.event_type_id]

  

-     def add_event_dependency(self, session, event):

+     def add_event_dependency(self, db_session, event):

          """Add a dependent event

  

-         :param session: the `db.session`.

+         :param db_session: SQLAlchemy database session object.

          :param event: the dependent event to be added.

          :type event: :py:class:`Event`

          :return: instance of :py:class:`EventDependency`. Caller is responsible

              for committing changes to database. If `event` has been added

              already, nothing changed and `None` will be returned.

          """

-         dep = session.query(EventDependency.id).filter_by(

+         dep = db_session.query(EventDependency.id).filter_by(

              event_id=self.id, event_dependency_id=event.id).first()

          if dep is None:

              dep = EventDependency(event_id=self.id,

                                    event_dependency_id=event.id)

-             session.add(dep)

+             db_session.add(dep)

              return dep

          else:

              return None

  

-     @property

-     def event_dependencies(self):

+     def get_event_dependencies(self, db_session):

          """

          Returns the list of Events this Event depends on.

          """

          events = []

-         deps = EventDependency.query.filter_by(event_id=self.id).all()

+         deps = db_session.query(EventDependency).filter_by(event_id=self.id).all()

          for dep in deps:

-             events.append(Event.query.filter_by(

+             events.append(db_session.query(Event).filter_by(

                  id=dep.event_dependency_id).first())

          return events

  

-     @property

-     def depending_events(self):

+     def get_depending_events(self, db_session):

          """

          Returns the list of Events depending on this Event.

          """

          depending_events = []

-         parents = EventDependency.query.filter_by(event_dependency_id=self.id).all()

+         parents = db_session.query(EventDependency).filter_by(event_dependency_id=self.id).all()

          for p in parents:

-             depending_events.append(Event.query.filter_by(

+             depending_events.append(db_session.query(Event).filter_by(

                  id=p.event_id).first())

          return depending_events

  

-     def has_all_builds_in_state(self, state):

+     def has_all_builds_in_state(self, db_session, state):

          """

          Returns True when all builds are in the given `state`.

+ 

+         :param db_session: SQLAlchemy database session object.

          """

-         return db.session.query(ArtifactBuild).filter_by(

+         return db_session.query(ArtifactBuild).filter_by(

              event_id=self.id).filter(state != state).count() == 0

  

-     def builds_transition(self, state, reason, filters=None):

+     def builds_transition(self, db_session, state, reason, filters=None):

          """

          Calls transition(state, reason) for all builds associated with this

          event.
@@ -328,13 +329,16 @@ 

          builds_to_transition = self.builds.filter_by(

              **filters).all() if isinstance(filters, dict) else self.builds

  

-         return [build.id

-                 for build in builds_to_transition if build.transition(state, reason)]

+         return [

+             build.id for build in builds_to_transition

+             if build.transition(db_session, state, reason)

+         ]

  

-     def transition(self, state, state_reason=None):

+     def transition(self, db_session, state, state_reason=None):

          """

          Sets the time_done, state, and state_reason of this Event.

  

+         :param db_session: SQLAlchemy database session object.

          :param state: EventState value

          :param state_reason: Reason why this state has been set.

          :return: True/False, whether state was changed
@@ -368,8 +372,9 @@ 

          if EventState(state).counter:

              EventState(state).counter.inc()

  

-         db.session.commit()

-         messaging.publish('event.state.changed', self.json())

+         db_session.add(self)

+         db_session.commit()

+         messaging.publish('event.state.changed', self.json(db_session))

          messaging.publish('event.state.changed.min', self.json_min())

  

          return True
@@ -390,11 +395,11 @@ 

              return {}

          return json.loads(self.requester_metadata)

  

-     def json(self):

+     def json(self, db_session):

          data = self._common_json()

          data['builds'] = [b.json() for b in self.builds]

-         data['depends_on_events'] = [event.id for event in self.event_dependencies]

-         data['depending_events'] = [event.id for event in self.depending_events]

+         data['depends_on_events'] = [event.id for event in self.get_event_dependencies(db_session)]

+         data['depending_events'] = [event.id for event in self.get_depending_events(db_session)]

          return data

  

      def json_min(self):
@@ -410,7 +415,6 @@ 

  

      def _common_json(self):

          event_url = get_url_for('event', id=self.id)

-         db.session.add(self)

          return {

              "id": self.id,

              "message_id": self.message_id,
@@ -429,7 +433,7 @@ 

              "requester_metadata": self.requester_metadata_json,

          }

  

-     def find_dependent_events(self):

+     def find_dependent_events(self, db_session):

          """

          Find other unreleased Events which built the same builds (or just some

          of them) as this Event and adds them as a dependency for this event.
@@ -437,6 +441,8 @@ 

          Dependent events of may also rebuild some same images that current event

          will build. So, for building images found from current event, we also

          need those YUM repositories used to build images in dependent events.

+ 

+         :param db_session: SQLAlchemy database session object.

          """

          builds_nvrs = [build.name for build in self.builds]

  
@@ -444,7 +450,7 @@ 

                    EventState.BUILDING.value,

                    EventState.COMPLETE.value]

  

-         query = db.session.query(ArtifactBuild.event_id)

+         query = db_session.query(ArtifactBuild.event_id)

          dep_event_ids = query.join(ArtifactBuild.event).filter(

              ArtifactBuild.name.in_(builds_nvrs),

              ArtifactBuild.event_id != self.id,
@@ -455,23 +461,25 @@ 

          ).distinct()

  

          dep_events = []

-         query = db.session.query(Event)

+         query = db_session.query(Event)

          for row in dep_event_ids:

              dep_event = query.filter_by(id=row[0]).first()

-             self.add_event_dependency(db.session, dep_event)

+             self.add_event_dependency(db_session, dep_event)

              dep_events.append(dep_event)

-         db.session.commit()

+         db_session.commit()

          return dep_events

  

-     def get_artifact_build_from_event_dependencies(self, nvr):

+     def get_artifact_build_from_event_dependencies(self, db_session, nvr):

          """

          It returns the artifact build, with `DONE` state, from the event dependencies (the build

          of the parent event). `nvr` is used as `original_nvr` when finding the `ArtifactBuild`.

          It returns all the parent artifact builds from the first found event dependency.

          If the build is not found, it returns None.

+ 

+         :param db_session: SQLAlchemy database session object.

          """

-         for parent_event in self.event_dependencies:

-             parent_build = db.session.query(

+         for parent_event in self.get_event_dependencies(db_session):

+             parent_build = db_session.query(

                  ArtifactBuild).filter_by(event_id=parent_event.id,

                                           original_nvr=nvr,

                                           state=ArtifactBuildState.DONE.value).all()
@@ -533,7 +541,7 @@ 

      composes = db.relationship('ArtifactBuildCompose', back_populates='build')

  

      @classmethod

-     def create(cls, session, event, name, type,

+     def create(cls, db_session, event, name, type,

                 build_id=None, dep_on=None, state=None,

                 original_nvr=None, rebuilt_nvr=None,

                 rebuild_reason=0):
@@ -551,7 +559,7 @@ 

              dep_on=dep_on,

              rebuild_reason=rebuild_reason

          )

-         session.add(build)

+         db_session.add(build)

          return build

  

      @validates('state')
@@ -575,12 +583,12 @@ 

          raise ValueError("%s: %s, not in %r" % (key, field, list(ArtifactType)))

  

      @classmethod

-     def get_lowest_build_id(cls, session):

+     def get_lowest_build_id(cls, db_session):

          """

          Returns the lowest build_id. If there is no build so far,

          returns 0.

          """

-         build = (session.query(ArtifactBuild)

+         build = (db_session.query(ArtifactBuild)

                   .filter(cls.build_id != None)  # noqa

                   .order_by(ArtifactBuild.build_id.asc())

                   .first())
@@ -588,13 +596,13 @@ 

              return 0

          return build.build_id

  

-     def depending_artifact_builds(self):

+     def depending_artifact_builds(self, db_session):

          """

          Returns list of artifact builds depending on this one.

          """

-         return ArtifactBuild.query.filter_by(dep_on_id=self.id).all()

+         return db_session.query(ArtifactBuild).filter_by(dep_on_id=self.id).all()

  

-     def transition(self, state, state_reason):

+     def transition(self, db_session, state, state_reason):

          """

          Sets the state and state_reason of this ArtifactBuild.

  
@@ -631,11 +639,14 @@ 

          # can rebuild them.

          if self.state in [ArtifactBuildState.FAILED.value,

                            ArtifactBuildState.CANCELED.value]:

-             for build in self.depending_artifact_builds():

+             for build in self.depending_artifact_builds(db_session):

                  build.transition(

+                     db_session,

                      self.state, "Cannot build artifact, because its "

                      "dependency cannot be built.")

  

+         db_session.add(self)

+         db_session.commit()

          messaging.publish('build.state.changed', self.json())

  

          return True
@@ -651,7 +662,6 @@ 

              build_args = json.loads(self.build_args)

  

          build_url = get_url_for('build', id=self.id)

-         db.session.add(self)

          return {

              "id": self.id,

              "name": self.name,
@@ -684,10 +694,10 @@ 

                  break

          return dep_on

  

-     def add_composes(self, session, composes):

+     def add_composes(self, db_session, composes):

          """Add an ODCS compose to this build"""

          for compose in composes:

-             session.add(ArtifactBuildCompose(

+             db_session.add(ArtifactBuildCompose(

                  build_id=self.id, compose_id=compose.id))

  

      @property
@@ -712,12 +722,12 @@ 

                  self.odcs_compose_id)['state_name']

  

      @classmethod

-     def get_lowest_compose_id(cls, session):

+     def get_lowest_compose_id(cls, db_session):

          """

          Returns the lowest odcs_compose_id. If there is no compose,

          returns 0.

          """

-         compose = session.query(Compose).order_by(

+         compose = db_session.query(Compose).order_by(

              Compose.odcs_compose_id.asc()).first()

          if not compose:

              return 0

file modified
+2 -3
@@ -132,10 +132,9 @@ 

  

  

  def db_hook_event_listeners(target=None):

-     # Service-specific import of db

-     from freshmaker import db

- 

      if not target:

+         # Service-specific import of db

+         from freshmaker import db

          target = db.engine

  

      @event.listens_for(target, 'dbapi_error', named=True)

file modified
+13 -12
@@ -38,7 +38,7 @@ 

  from odcs.common.types import COMPOSE_STATES

  from requests.exceptions import HTTPError

  

- from freshmaker import conf, log, db

+ from freshmaker import conf, log

  from freshmaker.models import Compose

  from freshmaker.errata import Errata

  from freshmaker.kojiservice import koji_service
@@ -97,6 +97,7 @@ 

              instance associated.

          """

          self.handler = handler

+         self.db_session = self.handler.db_session

  

      def _fake_odcs_new_compose(

              self, compose_source, tag, packages=None, results=None,
@@ -116,7 +117,7 @@ 

          # In case we run in DRY_RUN mode, we need to initialize

          # FAKE_COMPOSE_ID to the id of last ODCS compose to give the IDs

          # increasing and unique even between Freshmaker restarts.

-         fake_compose_id = Compose.get_lowest_compose_id(db.session) - 1

+         fake_compose_id = Compose.get_lowest_compose_id(self.db_session) - 1

          if fake_compose_id >= 0:

              fake_compose_id = -1

  
@@ -202,21 +203,21 @@ 

  

          compose = self.prepare_yum_repo(db_event)

          db_composes.append(Compose(odcs_compose_id=compose['id']))

-         db.session.add(db_composes[-1])

+         self.db_session.add(db_composes[-1])

          repo_urls.append(compose['result_repofile'])

  

-         for dep_event in db_event.find_dependent_events():

+         for dep_event in db_event.find_dependent_events(self.db_session):

              compose = self.prepare_yum_repo(dep_event)

              db_composes.append(Compose(odcs_compose_id=compose['id']))

-             db.session.add(db_composes[-1])

+             self.db_session.add(db_composes[-1])

              repo_urls.append(compose['result_repofile'])

  

          # commit all new composes

-         db.session.commit()

+         self.db_session.commit()

  

          for build in db_event.builds:

-             build.add_composes(db.session, db_composes)

-         db.session.commit()

+             build.add_composes(self.db_session, db_composes)

+         self.db_session.commit()

  

          # Remove duplicates from repo_urls.

          return list(set(repo_urls))
@@ -246,8 +247,8 @@ 

              if compose_source and compose_source != source:

                  # TODO: Handle this by generating two ODCS composes

                  db_event.builds_transition(

-                     ArtifactBuildState.FAILED.value, "Packages for errata "

-                     "advisory %d found in multiple different tags."

+                     self.db_session, ArtifactBuildState.FAILED.value,

+                     "Packages for errata advisory %d found in multiple different tags."

                      % (errata_id))

                  return

              else:
@@ -255,8 +256,8 @@ 

  

          if compose_source is None:

              db_event.builds_transition(

-                 ArtifactBuildState.FAILED.value, 'None of builds %s of '

-                 'advisory %d is the latest build in its candidate tag.'

+                 self.db_session, ArtifactBuildState.FAILED.value,

+                 'None of builds %s of advisory %d is the latest build in its candidate tag.'

                  % (builds, errata_id))

              return

  

file modified
+8 -6
@@ -272,7 +272,7 @@ 

              if not show_full_json:

                  json_data['items'] = [item.json_min() for item in p_query.items]

              else:

-                 json_data['items'] = [item.json() for item in p_query.items]

+                 json_data['items'] = [item.json(db.session) for item in p_query.items]

  

              return jsonify(json_data), 200

  
@@ -281,7 +281,7 @@ 

              if event:

                  if not show_full_json:

                      return jsonify(event.json_min()), 200

-                 return jsonify(event.json()), 200

+                 return jsonify(event.json(db.session)), 200

              else:

                  return json_error(404, "Not Found", "No such event found.")

  
@@ -331,12 +331,14 @@ 

          msg = "Event id %s requested for canceling by user %s" % (event.id, g.user.username)

          log.info(msg)

  

-         event.transition(EventState.CANCELED, msg)

+         event.transition(db.session, EventState.CANCELED, msg)

          event.builds_transition(

+             db.session,

              ArtifactBuildState.CANCELED.value,

              "Build canceled before running on external build system.",

              filters={'state': ArtifactBuildState.PLANNED.value})

          builds_id = event.builds_transition(

+             db.session,

              ArtifactBuildState.CANCELED.value, None,

              filters={'state': ArtifactBuildState.BUILD.value})

          db.session.commit()
@@ -346,7 +348,7 @@ 

          data["builds_id"] = builds_id

          messaging.publish("manage.eventcancel", data)

          # Return back the JSON representation of Event to client.

-         return jsonify(event.json()), 200

+         return jsonify(event.json(db.session)), 200

  

  

  def _validate_rebuild_request(request):
@@ -528,7 +530,7 @@ 

          messaging.publish("manual.rebuild", data)

  

          # Return back the JSON representation of Event to client.

-         return jsonify(db_event.json()), 200

+         return jsonify(db_event.json(db.session)), 200

  

  

  class AsyncBuildAPI(MethodView):
@@ -614,7 +616,7 @@ 

          messaging.publish("async.manual.build", data)

  

          # Return back the JSON representation of Event to client.

-         return jsonify(db_event.json()), 200

+         return jsonify(db_event.json(db.session)), 200

  

  

  class AboutAPI(MethodView):

@@ -27,7 +27,7 @@ 

  from freshmaker.events import (ErrataAdvisoryStateChangedEvent,

                                 ManualRebuildWithAdvisoryEvent)

  from freshmaker.handlers.bob import RebuildImagesOnImageAdvisoryChange

- from freshmaker import models, db

+ from freshmaker import models

  from tests import helpers

  

  
@@ -43,7 +43,7 @@ 

                             product_short_name="product"))

          self.handler = RebuildImagesOnImageAdvisoryChange()

          self.db_event = models.Event.get_or_create(

-             db.session, self.event.msg_id, self.event.search_key,

+             self.handler.db_session, self.event.msg_id, self.event.search_key,

              self.event.__class__)

  

      def test_can_handle(self):
@@ -119,7 +119,7 @@ 

              'http://localhost/update_children/scl/bar-526',

              headers={'Authorization': 'Bearer x'})

  

-         db.session.refresh(self.db_event)

+         self.handler.db_session.refresh(self.db_event)

  

          self.assertEqual(self.db_event.state, models.EventState.COMPLETE.value)

  

@@ -75,6 +75,7 @@ 

          models.ArtifactBuild.create(

              db.session, self.db_event, "bash", "module", build_id=1238,

              state=ArtifactBuildState.CANCELED.value)

+         db.session.commit()

  

      @patch('freshmaker.kojiservice.KojiService.cancel_build')

      def test_cancel_event_on_freshmaker_manage_request(self, mocked_cancel_build):

@@ -133,7 +133,7 @@ 

  

          args, kwargs = start_to_build_images.call_args

          passed_builds = sorted(args[0], key=lambda build: build.id)

-         self.assertEqual([self.build_1, self.build_3], passed_builds)

+         self.assertEqual([self.build_1.id, self.build_3.id], [b.id for b in passed_builds])

  

      @patch('freshmaker.models.ArtifactBuild.composes_ready',

             new_callable=PropertyMock)
@@ -141,6 +141,8 @@ 

      def test_start_to_build_parent_image_done(self, start_to_build_images, composes_ready):

          composes_ready.return_value = True

          self.build_1.state = ArtifactBuildState.DONE.value

+         db.session.add(self.build_1)

+         db.session.commit()

  

          event = ODCSComposeStateChangeEvent(

              'msg-id', {'id': self.compose_1.id, 'state': 'done'}
@@ -151,4 +153,4 @@ 

  

          args, kwargs = start_to_build_images.call_args

          passed_builds = sorted(args[0], key=lambda build: build.id)

-         self.assertEqual([self.build_3, self.build_2], passed_builds)

+         self.assertEqual([self.build_3.id, self.build_2.id], [b.id for b in passed_builds])

@@ -40,6 +40,7 @@ 

          super(TestRebuildImagesOnParentImageBuild, self).setUp()

          events.BaseEvent.register_parser(BrewTaskStateChangeParser)

          self.handler = RebuildImagesOnParentImageBuild()

+         self.db_session = self.handler.db_session

  

      def test_can_handle_brew_container_task_closed_event(self):

          """
@@ -64,17 +65,24 @@ 

          """

          build_image.side_effect = [1, 2, 3]

          repo_urls.return_value = ["url"]

-         e1 = models.Event.create(db.session, "test_msg_id", "RHSA-2018-001", events.TestingEvent)

+         e1 = models.Event.create(

+             self.db_session, "test_msg_id", "RHSA-2018-001", events.TestingEvent)

          event = self.get_event_from_msg(get_fedmsg('brew_container_task_closed'))

  

-         base_build = models.ArtifactBuild.create(db.session, e1, 'test-product-docker', ArtifactType.IMAGE, event.task_id)

+         base_build = models.ArtifactBuild.create(

+             self.db_session, e1, 'test-product-docker', ArtifactType.IMAGE, event.task_id)

+ 

+         build_0 = models.ArtifactBuild.create(

+             self.db_session, e1, 'docker-up-0', ArtifactType.IMAGE, 0,

+             dep_on=base_build, state=ArtifactBuildState.PLANNED)

+         build_1 = models.ArtifactBuild.create(

+             self.db_session, e1, 'docker-up-1', ArtifactType.IMAGE, 0,

+             dep_on=base_build, state=ArtifactBuildState.PLANNED)

+         build_2 = models.ArtifactBuild.create(

+             self.db_session, e1, 'docker-up-2', ArtifactType.IMAGE, 0,

+             dep_on=base_build, state=ArtifactBuildState.PLANNED)

  

-         build_0 = models.ArtifactBuild.create(db.session, e1, 'docker-up-0', ArtifactType.IMAGE, 0,

-                                               dep_on=base_build, state=ArtifactBuildState.PLANNED)

-         build_1 = models.ArtifactBuild.create(db.session, e1, 'docker-up-1', ArtifactType.IMAGE, 0,

-                                               dep_on=base_build, state=ArtifactBuildState.PLANNED)

-         build_2 = models.ArtifactBuild.create(db.session, e1, 'docker-up-2', ArtifactType.IMAGE, 0,

-                                               dep_on=base_build, state=ArtifactBuildState.PLANNED)

+         self.db_session.commit()

  

          self.handler.handle(event)

          self.assertEqual(base_build.state, ArtifactBuildState.DONE.value)
@@ -101,15 +109,15 @@ 

          build_image.side_effect = [1, 2, 3, 4]

          repo_urls.return_value = ["url"]

          rebuilt_nvr.side_effect = ["foo-1-1.2", "foo-1-1.3"]

-         e1 = models.Event.create(db.session, "test_msg_id", "RHSA-2018-001", events.TestingEvent)

+         e1 = models.Event.create(self.db_session, "test_msg_id", "RHSA-2018-001", events.TestingEvent)

          event = self.get_event_from_msg(get_fedmsg('brew_container_task_failed'))

  

          base_build = models.ArtifactBuild.create(

-             db.session, e1, 'test-product-docker', ArtifactType.IMAGE, event.task_id,

+             self.db_session, e1, 'test-product-docker', ArtifactType.IMAGE, event.task_id,

              original_nvr='foo-1-1', rebuilt_nvr='foo-1-1.1')

          base_build.build_args = json.dumps({})

  

-         models.ArtifactBuild.create(db.session, e1, 'docker-up', ArtifactType.IMAGE, 0,

+         models.ArtifactBuild.create(self.db_session, e1, 'docker-up', ArtifactType.IMAGE, 0,

                                      dep_on=base_build, state=ArtifactBuildState.PLANNED)

          self.handler.handle(event)

          self.assertEqual(base_build.state, ArtifactBuildState.BUILD.value)
@@ -233,9 +241,9 @@ 

              ['foo-1.2.1-22.el7', 'bar-1.2.3-1.el7']

          )

  

-         e1 = models.Event.create(db.session, "test_msg_id", "2018001", events.ErrataAdvisoryRPMsSignedEvent)

+         e1 = models.Event.create(self.db_session, "test_msg_id", "2018001", events.ErrataAdvisoryRPMsSignedEvent)

          event = self.get_event_from_msg(get_fedmsg('brew_container_task_closed'))

-         build = models.ArtifactBuild.create(db.session, e1, 'test-product-docker', ArtifactType.IMAGE, event.task_id)

+         build = models.ArtifactBuild.create(self.db_session, e1, 'test-product-docker', ArtifactType.IMAGE, event.task_id)

  

          self.handler.handle(event)

  
@@ -261,9 +269,9 @@ 

              ['foo-1.2.1-22.el7', 'bar-1.2.3-1.el7']

          )

  

-         e1 = models.Event.create(db.session, "test_msg_id", "2018001", events.ErrataAdvisoryRPMsSignedEvent)

+         e1 = models.Event.create(self.db_session, "test_msg_id", "2018001", events.ErrataAdvisoryRPMsSignedEvent)

          event = self.get_event_from_msg(get_fedmsg('brew_container_task_closed'))

-         build = models.ArtifactBuild.create(db.session, e1, 'test-product-docker', ArtifactType.IMAGE, event.task_id)

+         build = models.ArtifactBuild.create(self.db_session, e1, 'test-product-docker', ArtifactType.IMAGE, event.task_id)

  

          self.handler.handle(event)

          self.assertEqual(build.state, ArtifactBuildState.FAILED.value)

@@ -264,7 +264,7 @@ 

          handler = RebuildImagesOnRPMAdvisoryChange()

          handler.handle(self.rhba_event)

  

-         db_event = Event.get(db.session, message_id='123')

+         db_event = Event.get(handler.db_session, message_id='123')

          self.assertEqual(db_event.state, EventState.SKIPPED.value)

  

      @patch.object(freshmaker.conf, 'handler_build_whitelist', new={
@@ -286,7 +286,7 @@ 

              handler = RebuildImagesOnRPMAdvisoryChange()

              handler.handle(self.rhba_event)

  

-             db_event = Event.get(db.session, message_id='123')

+             db_event = Event.get(handler.db_session, message_id='123')

              self.assertEqual(db_event.state, EventState.SKIPPED.value)

              if severity == "moderate":

                  self.assertTrue(db_event.state_reason.endswith(
@@ -315,7 +315,7 @@ 

              handler = RebuildImagesOnRPMAdvisoryChange()

              handler.handle(self.rhba_event)

  

-             db_event = Event.get(db.session, message_id='123')

+             db_event = Event.get(handler.db_session, message_id='123')

              self.assertEqual(db_event.state, EventState.SKIPPED.value)

              if not has_hightouch_bug:

                  self.assertTrue(db_event.state_reason.endswith(
@@ -335,7 +335,7 @@ 

          handler = RebuildImagesOnRPMAdvisoryChange()

          handler.handle(self.rhba_event)

  

-         db_event = Event.get(db.session, message_id='123')

+         db_event = Event.get(handler.db_session, message_id='123')

          self.assertEqual(db_event.state, EventState.SKIPPED.value)

          self.assertEqual(

              db_event.state_reason,
@@ -353,7 +353,7 @@ 

          handler = RebuildImagesOnRPMAdvisoryChange()

          handler.handle(self.rhba_event)

  

-         db_event = Event.get(db.session, message_id='123')

+         db_event = Event.get(handler.db_session, message_id='123')

          self.assertEqual(db_event.state, EventState.COMPLETE.value)

          self.assertEqual(

              db_event.state_reason,
@@ -373,7 +373,7 @@ 

          prepare_yum_repos_for_rebuilds.assert_called_once()

          start_to_build_images.assert_called_once()

  

-         db_event = Event.get(db.session, self.rhsa_event.msg_id)

+         db_event = Event.get(handler.db_session, self.rhsa_event.msg_id)

          self.assertEqual(EventState.BUILDING.value, db_event.state)

  

      @patch('freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange.'
@@ -394,10 +394,10 @@ 

          handler.handle(event)

  

          prepare_yum_repos_for_rebuilds.assert_not_called()

-         get_image_builds_in_first_batch.assert_called_once_with(db.session)

+         get_image_builds_in_first_batch.assert_called_once_with(handler.db_session)

          start_to_build_images.assert_called_once()

  

-         db_event = Event.get(db.session, event.msg_id)

+         db_event = Event.get(handler.db_session, event.msg_id)

          self.assertEqual(EventState.BUILDING.value, db_event.state)

  

  
@@ -864,7 +864,7 @@ 

          handler._record_batches(batches, event)

  

          # Check that the images have proper data in proper db columns.

-         e = db.session.query(Event).filter(Event.id == 1).one()

+         e = handler.db_session.query(Event).filter(Event.id == 1).one()

          for build in e.builds:

              # child1_parent1 and child1 are in FAILED states, because LB failed

              # to resolve child1_parent1 and therefore also child1 cannot be
@@ -900,6 +900,8 @@ 

  

      def setUp(self):

          super(TestCheckImagesToRebuild, self).setUp()

+         self.handler = RebuildImagesOnRPMAdvisoryChange()

+         self.db_session = self.handler.db_session

  

          build_args = json.dumps({

              "original_parent": "nvr",
@@ -912,20 +914,20 @@ 

              "odcs_pulp_compose_id": 15,

          })

  

-         self.ev = Event.create(db.session, 'msg-id', '123',

+         self.ev = Event.create(self.db_session, 'msg-id', '123',

                                 EVENT_TYPES[ErrataAdvisoryRPMsSignedEvent])

          self.b1 = ArtifactBuild.create(

-             db.session, self.ev, "parent", "image",

+             self.db_session, self.ev, "parent", "image",

              state=ArtifactBuildState.PLANNED,

              original_nvr="parent-1-25")

          self.b1.build_args = build_args

          self.b2 = ArtifactBuild.create(

-             db.session, self.ev, "child", "image",

+             self.db_session, self.ev, "child", "image",

              state=ArtifactBuildState.PLANNED,

              dep_on=self.b1,

              original_nvr="child-1-25")

          self.b2.build_args = build_args

-         db.session.commit()

+         self.db_session.commit()

  

      def test_check_images_to_rebuild(self):

          builds = {
@@ -933,12 +935,11 @@ 

              "child-1-25": self.b2

          }

  

-         handler = RebuildImagesOnRPMAdvisoryChange()

-         handler.set_context(self.ev)

-         handler._check_images_to_rebuild(self.ev, builds)

+         self.handler.set_context(self.ev)

+         self.handler._check_images_to_rebuild(self.ev, builds)

  

          # Check that the images have proper data in proper db columns.

-         e = db.session.query(Event).filter(Event.id == 1).one()

+         e = self.db_session.query(Event).filter(Event.id == 1).one()

          for build in e.builds:

              self.assertEqual(build.state, ArtifactBuildState.PLANNED.value)

  
@@ -949,12 +950,11 @@ 

              "parent-1-25": self.b1

          }

  

-         handler = RebuildImagesOnRPMAdvisoryChange()

-         handler.set_context(self.ev)

-         handler._check_images_to_rebuild(self.ev, builds)

+         self.handler.set_context(self.ev)

+         self.handler._check_images_to_rebuild(self.ev, builds)

  

          # Check that the images have proper data in proper db columns.

-         e = db.session.query(Event).filter(Event.id == 1).one()

+         e = self.db_session.query(Event).filter(Event.id == 1).one()

          for build in e.builds:

              self.assertEqual(build.state, ArtifactBuildState.FAILED.value)

  
@@ -965,12 +965,11 @@ 

              "something-1-25": self.b1,

          }

  

-         handler = RebuildImagesOnRPMAdvisoryChange()

-         handler.set_context(self.ev)

-         handler._check_images_to_rebuild(self.ev, builds)

+         self.handler.set_context(self.ev)

+         self.handler._check_images_to_rebuild(self.ev, builds)

  

          # Check that the images have proper data in proper db columns.

-         e = db.session.query(Event).filter(Event.id == 1).one()

+         e = self.db_session.query(Event).filter(Event.id == 1).one()

          for build in e.builds:

              self.assertEqual(build.state, ArtifactBuildState.FAILED.value)

  
@@ -1073,7 +1072,7 @@ 

          handler._record_batches(batches, self.mock_event)

  

          # Check parent image

-         query = db.session.query(ArtifactBuild)

+         query = handler.db_session.query(ArtifactBuild)

          parent_image = query.filter(

              ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82'

          ).first()
@@ -1120,7 +1119,7 @@ 

          handler._record_batches(batches, self.mock_event)

  

          # Check parent image

-         query = db.session.query(ArtifactBuild)

+         query = handler.db_session.query(ArtifactBuild)

          parent_image = query.filter(

              ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82'

          ).first()
@@ -1160,7 +1159,7 @@ 

          handler._record_batches(batches, self.mock_event)

  

          # Check parent image

-         query = db.session.query(ArtifactBuild)

+         query = handler.db_session.query(ArtifactBuild)

          parent_image = query.filter(

              ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82'

          ).first()
@@ -1243,7 +1242,7 @@ 

          handler = RebuildImagesOnRPMAdvisoryChange()

          handler._record_batches(batches, self.mock_event)

  

-         query = db.session.query(ArtifactBuild)

+         query = handler.db_session.query(ArtifactBuild)

          parent_build = query.filter(

              ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82'

          ).first()
@@ -1290,7 +1289,7 @@ 

          handler = RebuildImagesOnRPMAdvisoryChange()

          handler._record_batches(batches, self.mock_event)

  

-         query = db.session.query(ArtifactBuild)

+         query = handler.db_session.query(ArtifactBuild)

          build = query.filter(

              ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82'

          ).first()
@@ -1327,7 +1326,7 @@ 

          handler = RebuildImagesOnRPMAdvisoryChange()

          handler._record_batches(batches, self.mock_event)

  

-         query = db.session.query(ArtifactBuild)

+         query = handler.db_session.query(ArtifactBuild)

          build = query.filter(

              ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82'

          ).first()
@@ -1407,7 +1406,7 @@ 

          handler = RebuildImagesOnRPMAdvisoryChange()

          handler._record_batches(batches, self.mock_event)

  

-         query = db.session.query(ArtifactBuild)

+         query = handler.db_session.query(ArtifactBuild)

          build = query.filter(

              ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82'

          ).first()
@@ -1448,7 +1447,7 @@ 

          handler = RebuildImagesOnRPMAdvisoryChange()

          handler._record_batches(batches, self.mock_event)

  

-         build = db.session.query(ArtifactBuild).filter_by(

+         build = handler.db_session.query(ArtifactBuild).filter_by(

              original_nvr='rhel-server-docker-7.3-82').first()

          self.assertEqual(ArtifactBuildState.FAILED.value, build.state)

  
@@ -1548,7 +1547,7 @@ 

          handler._record_batches(batches, et_event)

  

          # Check parent image

-         query = db.session.query(ArtifactBuild)

+         query = handler.db_session.query(ArtifactBuild)

          parent_image = query.filter(

              ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82'

          ).all()

file modified
+14 -14
@@ -92,14 +92,14 @@ 

          ArtifactBuild.create(db.session, event, "perl-runtime", "module", 1237)

          db.session.commit()

  

-         deps = set(parent.depending_artifact_builds())

+         deps = set(parent.depending_artifact_builds(db.session))

          self.assertEqual(deps, set([build2, build3]))

  

      def test_event_transition(self):

          for i, state in enumerate([

                  EventState.COMPLETE, EventState.COMPLETE.value, "complete"]):

              event = Event.create(db.session, "test_msg_id_{}".format(i), "test", events.TestingEvent)

-             event.transition(state, "reason")

+             event.transition(db.session, state, "reason")

              self.assertEqual(event.state, EventState.COMPLETE.value)

              self.assertTrue(event.time_done is not None)

  
@@ -113,7 +113,7 @@ 

              build4 = ArtifactBuild.create(db.session, event, "perl-runtime", "module", 1237)

              db.session.commit()

  

-             build1.transition(state, "reason")

+             build1.transition(db.session, state, "reason")

              self.assertEqual(build1.state, state)

              self.assertEqual(build1.state_reason, "reason")

  
@@ -136,7 +136,7 @@ 

              build4 = ArtifactBuild.create(db.session, event, "perl-runtime", "module", 1237)

              db.session.commit()

  

-             build1.transition(state, "reason")

+             build1.transition(db.session, state, "reason")

              self.assertEqual(build1.state, state)

              self.assertEqual(build1.state_reason, "reason")

  
@@ -286,7 +286,7 @@ 

          db.session.commit()

  

      def test_find_dependent_events(self):

-         dep_events = self.event_1.find_dependent_events()

+         dep_events = self.event_1.find_dependent_events(db.session)

          self.assertEqual([self.event_2.id, self.event_3.id],

                           sorted([event.id for event in dep_events]))

  
@@ -387,8 +387,8 @@ 

      def test_event_dependencies(self):

          event = Event.create(db.session, "test_msg_id", "test", events.TestingEvent)

          db.session.commit()

-         self.assertEqual(event.event_dependencies, [])

-         self.assertEqual(event.depending_events, [])

+         self.assertEqual(event.get_event_dependencies(db.session), [])

+         self.assertEqual(event.get_depending_events(db.session), [])

  

      def test_add_a_dependent_event(self):

          event = Event.create(db.session, "test_msg_id", "test", events.TestingEvent)
@@ -398,11 +398,11 @@ 

          event.add_event_dependency(db.session, event1)

          db.session.commit()

  

-         self.assertEqual(event.event_dependencies, [event1])

-         self.assertEqual(event.event_dependencies[0].search_key, "test2")

-         self.assertEqual(event1.event_dependencies, [])

-         self.assertEqual(event1.depending_events, [event])

-         self.assertEqual(event.depending_events, [])