#1309 Fix tests in order to run with PostgreSQL
Merged 4 years ago by mprahl. Opened 4 years ago by cqi.
cqi/fm-orchestrator run-unittests-with-pgsql  into  master

file modified
+100 -69
@@ -147,12 +147,16 @@ 

              # Just to possibly confuse tests by adding another base module.

              mmd = mmd.copy("bootstrap", stream)

              import_mmd(db.session, mmd)

-     with make_session(conf) as session:

-         _populate_data(session, data_size, contexts=contexts, scratch=scratch)

+     with make_session(conf) as db_session:

+         _populate_data(db_session, data_size, contexts=contexts, scratch=scratch)

  

  

- def _populate_data(session, data_size=10, contexts=False, scratch=False):

-     arch = module_build_service.models.ModuleArch.query.get(1)

+ def _populate_data(db_session, data_size=10, contexts=False, scratch=False):

+     # Query arch from passed database session, otherwise there will be an error

+     # like "Object '<ModuleBuild at 0x7f4ccc805c50>' is already attached to

+     # session '275' (this is '276')" when add new module build object to passed

+     # session.

+     arch = db_session.query(module_build_service.models.ModuleArch).get(1)

      num_contexts = 2 if contexts else 1

      for index in range(data_size):

          for context in range(num_contexts):
@@ -191,11 +195,11 @@ 

                  combined_hashes = "{0}:{1}".format(unique_hash, unique_hash)

                  build_one.context = hashlib.sha1(combined_hashes.encode("utf-8")).hexdigest()[:8]

  

-             session.add(build_one)

-             session.commit()

+             db_session.add(build_one)

+             db_session.commit()

              build_one_component_release = get_rpm_release(build_one)

  

-             component_one_build_one = ComponentBuild(

+             db_session.add(ComponentBuild(

                  package="nginx",

                  scmurl="git://pkgs.domain.local/rpms/nginx?"

                         "#ga95886c8a443b36a9ce31abda1f9bed22f2f8c3",
@@ -207,9 +211,8 @@ 

                  module_id=2 + index * 3,

                  tagged=True,

                  tagged_in_final=True,

-             )

- 

-             component_two_build_one = ComponentBuild(

+             ))

+             db_session.add(ComponentBuild(

                  package="module-build-macros",

                  scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"

                         "module-build-macros-0.1-1.module_nginx_1_2.src.rpm",
@@ -221,7 +224,8 @@ 

                  module_id=2 + index * 3,

                  tagged=True,

                  tagged_in_final=True,

-             )

+             ))

+             db_session.commit()

  

          build_two = ModuleBuild(

              name="postgressql",
@@ -242,11 +246,12 @@ 

          )

          build_two.arches.append(arch)

  

-         session.add(build_two)

-         session.commit()

+         db_session.add(build_two)

+         db_session.commit()

+ 

          build_two_component_release = get_rpm_release(build_two)

  

-         component_one_build_two = ComponentBuild(

+         db_session.add(ComponentBuild(

              package="postgresql",

              scmurl="git://pkgs.domain.local/rpms/postgresql"

                     "?#dc95586c4a443b26a9ce38abda1f9bed22f2f8c3",
@@ -258,9 +263,8 @@ 

              module_id=3 + index * 3,

              tagged=True,

              tagged_in_final=True,

-         )

- 

-         component_two_build_two = ComponentBuild(

+         ))

+         db_session.add(ComponentBuild(

              package="module-build-macros",

              scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"

                     "module-build-macros-0.1-1.module_postgresql_1_2.src.rpm",
@@ -270,7 +274,9 @@ 

              nvr="module-build-macros-01-1.{0}".format(build_two_component_release),

              batch=1,

              module_id=3 + index * 3,

-         )

+         ))

+ 

+         db_session.commit()

  

          build_three = ModuleBuild(

              name="testmodule",
@@ -289,11 +295,12 @@ 

              time_completed=None,

              rebuild_strategy="changed-and-after",

          )

-         session.add(build_three)

-         session.commit()

+         db_session.add(build_three)

+         db_session.commit()

+ 

          build_three_component_release = get_rpm_release(build_three)

  

-         component_one_build_three = ComponentBuild(

+         db_session.add(ComponentBuild(

              package="rubygem-rails",

              scmurl="git://pkgs.domain.local/rpms/rubygem-rails"

                     "?#dd55886c4a443b26a9ce38abda1f9bed22f2f8c3",
@@ -303,9 +310,9 @@ 

              nvr="postgresql-9.5.3-4.{0}".format(build_three_component_release),

              batch=2,

              module_id=4 + index * 3,

-         )

+         ))

  

-         component_two_build_three = ComponentBuild(

+         db_session.add(ComponentBuild(

              package="module-build-macros",

              scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"

                     "module-build-macros-0.1-1.module_testmodule_1_2.src.rpm",
@@ -317,18 +324,12 @@ 

              module_id=4 + index * 3,

              tagged=True,

              build_time_only=True,

-         )

+         ))

  

-         session.add(component_one_build_one)

-         session.add(component_two_build_one)

-         session.add(component_one_build_two)

-         session.add(component_two_build_two)

-         session.add(component_one_build_three)

-         session.add(component_two_build_three)

-         session.commit()

+         db_session.commit()

  

  

- def scheduler_init_data(tangerine_state=None, scratch=False):

+ def scheduler_init_data(db_session, tangerine_state=None, scratch=False):

      """ Creates a testmodule in the building state with all the components in the same batch

      """

      clean_database()
@@ -339,13 +340,10 @@ 

      mmd = load_mmd_file(formatted_testmodule_yml_path)

      mmd.get_rpm_component("tangerine").set_buildorder(0)

  

-     platform_br = module_build_service.models.ModuleBuild.query.get(1)

-     arch = module_build_service.models.ModuleArch.query.get(1)

- 

      module_build = module_build_service.models.ModuleBuild(

          name="testmodule",

          stream="master",

-         version=20170109091357,

+         version='20170109091357',

          state=BUILD_STATES["build"],

          scratch=scratch,

          build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
@@ -364,12 +362,20 @@ 

          modulemd=mmd_to_str(mmd),

      )

  

-     module_build.arches.append(arch)

+     db_session.add(module_build)

+     db_session.commit()

+ 

+     platform_br = db_session.query(module_build_service.models.ModuleBuild).get(1)

      module_build.buildrequires.append(platform_br)

+ 

+     arch = db_session.query(module_build_service.models.ModuleArch).get(1)

+     module_build.arches.append(arch)

+ 

      build_one_component_release = get_rpm_release(module_build)

  

-     module_build.component_builds.extend([

+     module_build_comp_builds = [

          module_build_service.models.ComponentBuild(

+             module_id=module_build.id,

              package="perl-Tangerine",

              scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"

                     "?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",
@@ -383,6 +389,7 @@ 

              tagged_in_final=True,

          ),

          module_build_service.models.ComponentBuild(

+             module_id=module_build.id,

              package="perl-List-Compare",

              scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"

                     "?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
@@ -396,6 +403,7 @@ 

              tagged_in_final=True,

          ),

          module_build_service.models.ComponentBuild(

+             module_id=module_build.id,

              package="tangerine",

              scmurl="https://src.fedoraproject.org/rpms/tangerine"

                     "?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",
@@ -411,6 +419,7 @@ 

              tagged_in_final=tangerine_state == koji.BUILD_STATES["COMPLETE"],

          ),

          module_build_service.models.ComponentBuild(

+             module_id=module_build.id,

              package="module-build-macros",

              scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"

                     "macros-0.1-1.module_testmodule_master_20170109091357.src.rpm",
@@ -422,12 +431,10 @@ 

              tagged=True,

              build_time_only=True,

          ),

-     ])

- 

-     with make_session(conf) as session:

-         session.add(platform_br)

-         session.add(module_build)

-         session.commit()

+     ]

+     for c in module_build_comp_builds:

+         db_session.add(c)

+     db_session.commit()

  

  

  def reuse_component_init_data():
@@ -438,13 +445,10 @@ 

          current_dir, "staged_data", "formatted_testmodule.yaml")

      mmd = load_mmd_file(formatted_testmodule_yml_path)

  

-     platform_br = module_build_service.models.ModuleBuild.query.get(1)

-     arch = module_build_service.models.ModuleArch.query.get(1)

- 

      build_one = module_build_service.models.ModuleBuild(

          name="testmodule",

          stream="master",

-         version=20170109091357,

+         version='20170109091357',

          state=BUILD_STATES["ready"],

          ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",

          runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
@@ -468,11 +472,20 @@ 

      xmd["mbs"]["commit"] = "ff1ea79fc952143efeed1851aa0aa006559239ba"

      mmd.set_xmd(xmd)

      build_one.modulemd = mmd_to_str(mmd)

-     build_one.arches.append(arch)

+ 

+     db.session.add(build_one)

+     db.session.commit()

+     db.session.refresh(build_one)

+ 

+     platform_br = module_build_service.models.ModuleBuild.query.get(1)

      build_one.buildrequires.append(platform_br)

  

-     build_one.component_builds.extend([

+     arch = module_build_service.models.ModuleArch.query.get(1)

+     build_one.arches.append(arch)

+ 

+     build_one_comp_builds = [

          module_build_service.models.ComponentBuild(

+             module_id=build_one.id,

              package="perl-Tangerine",

              scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"

                     "?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",
@@ -486,6 +499,7 @@ 

              tagged_in_final=True,

          ),

          module_build_service.models.ComponentBuild(

+             module_id=build_one.id,

              package="perl-List-Compare",

              scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"

                     "?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
@@ -499,6 +513,7 @@ 

              tagged_in_final=True,

          ),

          module_build_service.models.ComponentBuild(

+             module_id=build_one.id,

              package="tangerine",

              scmurl="https://src.fedoraproject.org/rpms/tangerine"

                     "?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",
@@ -512,6 +527,7 @@ 

              tagged_in_final=True,

          ),

          module_build_service.models.ComponentBuild(

+             module_id=build_one.id,

              package="module-build-macros",

              scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"

                     "macros-0.1-1.module_testmodule_master_20170109091357.src.rpm",
@@ -523,12 +539,17 @@ 

              tagged=True,

              build_time_only=True,

          ),

-     ])

+     ]

+     for c in build_one_comp_builds:

+         db.session.add(c)

+ 

+     # Commit component builds added to build_one

+     db.session.commit()

  

      build_two = module_build_service.models.ModuleBuild(

          name="testmodule",

          stream="master",

-         version=20170219191323,

+         version='20170219191323',

          state=BUILD_STATES["build"],

          ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",

          runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
@@ -551,11 +572,17 @@ 

      xmd["mbs"]["commit"] = "55f4a0a2e6cc255c88712a905157ab39315b8fd8"

      mmd.set_xmd(xmd)

      build_two.modulemd = mmd_to_str(mmd)

+ 

+     db.session.add(build_two)

+     db.session.commit()

+     db.session.refresh(build_two)

+ 

      build_two.arches.append(arch)

      build_two.buildrequires.append(platform_br)

  

-     build_two.component_builds.extend([

+     build_two_comp_builds = [

          module_build_service.models.ComponentBuild(

+             module_id=build_two.id,

              package="perl-Tangerine",

              scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"

                     "?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",
@@ -564,6 +591,7 @@ 

              ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf",

          ),

          module_build_service.models.ComponentBuild(

+             module_id=build_two.id,

              package="perl-List-Compare",

              scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"

                     "?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
@@ -572,6 +600,7 @@ 

              ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",

          ),

          module_build_service.models.ComponentBuild(

+             module_id=build_two.id,

              package="tangerine",

              scmurl="https://src.fedoraproject.org/rpms/tangerine"

                     "?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",
@@ -580,6 +609,7 @@ 

              ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c",

          ),

          module_build_service.models.ComponentBuild(

+             module_id=build_two.id,

              package="module-build-macros",

              scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"

                     "macros-0.1-1.module_testmodule_master_20170219191323.src.rpm",
@@ -591,13 +621,12 @@ 

              tagged=True,

              build_time_only=True,

          ),

-     ])

+     ]

+     for c in build_two_comp_builds:

+         db.session.add(c)

  

-     with make_session(conf) as session:

-         session.add(platform_br)

-         session.add(build_one)

-         session.add(build_two)

-         session.commit()

+     # Commit component builds added to build_two

+     db.session.commit()

  

  

  def reuse_shared_userspace_init_data():
@@ -714,6 +743,7 @@ 

  

  

  def make_module(

+     db_session,

      nsvc,

      requires_list=None,

      build_requires_list=None,
@@ -728,6 +758,7 @@ 

      Creates new models.ModuleBuild defined by `nsvc` string with requires

      and buildrequires set according to ``requires_list`` and ``build_requires_list``.

  

+     :param db_session: SQLAlchemy database session.

      :param str nsvc: name:stream:version:context of a module.

      :param list_of_dicts requires_list: List of dictionaries defining the

          requires in the mmd requires field format.
@@ -826,33 +857,33 @@ 

      )

      if base_module:

          module_build.buildrequires.append(base_module)

-     db.session.add(module_build)

-     db.session.commit()

+     db_session.add(module_build)

+     db_session.commit()

  

      if virtual_streams:

          for virtual_stream in virtual_streams:

-             vs_obj = db.session.query(VirtualStream).filter_by(name=virtual_stream).first()

+             vs_obj = db_session.query(VirtualStream).filter_by(name=virtual_stream).first()

              if not vs_obj:

                  vs_obj = VirtualStream(name=virtual_stream)

-                 db.session.add(vs_obj)

-                 db.session.commit()

+                 db_session.add(vs_obj)

+                 db_session.commit()

  

              if vs_obj not in module_build.virtual_streams:

                  module_build.virtual_streams.append(vs_obj)

-                 db.session.commit()

+                 db_session.commit()

  

      if arches is None:

          arches = ["x86_64"]

      for arch in arches:

-         arch_obj = db.session.query(module_build_service.models.ModuleArch).filter_by(

+         arch_obj = db_session.query(module_build_service.models.ModuleArch).filter_by(

              name=arch).first()

          if not arch_obj:

              arch_obj = module_build_service.models.ModuleArch(name=arch)

-             db.session.add(arch_obj)

-             db.session.commit()

+             db_session.add(arch_obj)

+             db_session.commit()

  

          if arch_obj not in module_build.arches:

              module_build.arches.append(arch_obj)

-             db.session.commit()

+             db_session.commit()

  

      return module_build

file modified
+8
@@ -22,6 +22,8 @@ 

  

  import pytest

  

+ from module_build_service import conf

+ from module_build_service.models import make_session

  from module_build_service.utils.general import load_mmd_file, mmd_to_str

  

  
@@ -57,3 +59,9 @@ 

  @pytest.fixture()

  def platform_mmd():

      return PLATFORM_MODULEMD

+ 

+ 

+ @pytest.fixture(scope="function")

+ def db_session():

+     with make_session(conf) as db_session:

+         yield db_session

file modified
+125 -125
@@ -36,6 +36,7 @@ 

  import module_build_service.utils

  from module_build_service.errors import Forbidden

  from module_build_service import db, models, conf, build_logs

+ from module_build_service.scheduler import make_simple_stop_condition

  

  from mock import patch, PropertyMock, Mock

  from werkzeug.datastructures import FileStorage
@@ -347,6 +348,15 @@ 

      import moksha.hub.reactor  # noqa

  

  

+ class BaseTestBuild:

+ 

+     def run_scheduler(self, db_session, msgs=None, stop_condition=None):

+         module_build_service.scheduler.main(

+             msgs or [],

+             stop_condition or make_simple_stop_condition(db_session)

+         )

+ 

+ 

  @patch("module_build_service.scheduler.handlers.modules.handle_stream_collision_modules")

  @patch.object(

      module_build_service.config.Config, "system", new_callable=PropertyMock, return_value="test"
@@ -391,7 +401,7 @@ 

          ]),

      },

  )

- class TestBuild:

+ class TestBuild(BaseTestBuild):

      # Global variable used for tests if needed

      _global_var = None

  
@@ -414,7 +424,8 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build(

-         self, mocked_scm, mocked_get_user, mocked_greenwave, conf_system, dbg, hmsc, mmd_version

+         self, mocked_scm, mocked_get_user, mocked_greenwave, conf_system, dbg,

+         hmsc, mmd_version, db_session

      ):

          """

          Tests the build of testmodule.yaml using FakeModuleBuilder which
@@ -464,9 +475,7 @@ 

  

          FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

  

-         msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main(msgs, stop)

+         self.run_scheduler(db_session)

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -493,7 +502,8 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_no_components(

-         self, mocked_scm, mocked_get_user, mocked_greenwave, conf_system, dbg, hmsc, gating_result

+         self, mocked_scm, mocked_get_user, mocked_greenwave, conf_system, dbg,

+         hmsc, gating_result, db_session

      ):

          """

          Tests the build of a module with no components
@@ -516,9 +526,8 @@ 

  

          data = json.loads(rv.data)

          module_build_id = data["id"]

-         msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main(msgs, stop)

+ 

+         self.run_scheduler(db_session)

  

          module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()

          # Make sure no component builds were registered
@@ -591,7 +600,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_from_yaml_allowed(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

      ):

          FakeSCM(

              mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")
@@ -612,14 +621,16 @@ 

                  )

              data = json.loads(rv.data)

              assert data["id"] == 2

-         msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main(msgs, stop)

+ 

+         self.run_scheduler(db_session)

+ 

          assert models.ModuleBuild.query.first().state == models.BUILD_STATES["ready"]

  

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

-     def test_submit_build_cancel(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):

+     def test_submit_build_cancel(

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+     ):

          """

          Submit all builds for a module and cancel the module build later.

          """
@@ -662,9 +673,7 @@ 

          FakeModuleBuilder.on_cancel_cb = on_cancel_cb

          FakeModuleBuilder.on_finalize_cb = on_finalize_cb

  

-         msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main(msgs, stop)

+         self.run_scheduler(db_session)

  

          # Because we did not finished single component build and canceled the

          # module build, all components and even the module itself should be in
@@ -682,7 +691,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_instant_complete(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

      ):

          """

          Tests the build of testmodule.yaml using FakeModuleBuilder which
@@ -704,9 +713,7 @@ 

          module_build_id = data["id"]

          FakeModuleBuilder.INSTANT_COMPLETE = True

  

-         msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main(msgs, stop)

+         self.run_scheduler(db_session)

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -725,7 +732,8 @@ 

          return_value=1,

      )

      def test_submit_build_concurrent_threshold(

-         self, conf_num_concurrent_builds, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

+         self, conf_num_concurrent_builds, mocked_scm, mocked_get_user,

+         conf_system, dbg, hmsc, db_session

      ):

          """

          Tests the build of testmodule.yaml using FakeModuleBuilder with
@@ -751,17 +759,16 @@ 

              Stop the scheduler when the module is built or when we try to build

              more components than the num_concurrent_builds.

              """

-             main_stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+             main_stop = make_simple_stop_condition(db_session)

              build_count = (

-                 db.session.query(models.ComponentBuild).filter_by(

+                 db_session.query(models.ComponentBuild).filter_by(

                      state=koji.BUILD_STATES["BUILDING"]

                  ).count()

              )

              over_threshold = conf.num_concurrent_builds < build_count

              return main_stop(message) or over_threshold

  

-         msgs = []

-         module_build_service.scheduler.main(msgs, stop)

+         self.run_scheduler(db_session, stop_condition=stop)

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -782,7 +789,8 @@ 

          return_value=2,

      )

      def test_try_to_reach_concurrent_threshold(

-         self, conf_num_concurrent_builds, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

+         self, conf_num_concurrent_builds, mocked_scm, mocked_get_user,

+         conf_system, dbg, hmsc, db_session

      ):

          """

          Tests that we try to submit new component build right after
@@ -813,9 +821,9 @@ 

              Stop the scheduler when the module is built or when we try to build

              more components than the num_concurrent_builds.

              """

-             main_stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

+             main_stop = module_build_service.scheduler.make_simple_stop_condition(db_session)

              num_building = (

-                 db.session.query(models.ComponentBuild)

+                 db_session.query(models.ComponentBuild)

                  .filter_by(state=koji.BUILD_STATES["BUILDING"])

                  .count()

              )
@@ -823,8 +831,7 @@ 

              TestBuild._global_var.append(num_building)

              return main_stop(message) or over_threshold

  

-         msgs = []

-         module_build_service.scheduler.main(msgs, stop)

+         self.run_scheduler(db_session, stop_condition=stop)

  

          # _global_var looks similar to this: [0, 1, 0, 0, 2, 2, 1, 0, 0, 0]

          # It shows the number of concurrent builds in the time. At first we
@@ -847,7 +854,8 @@ 

          return_value=1,

      )

      def test_build_in_batch_fails(

-         self, conf_num_concurrent_builds, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

+         self, conf_num_concurrent_builds, mocked_scm, mocked_get_user,

+         conf_system, dbg, hmsc, db_session

      ):

          """

          Tests that if the build in batch fails, other components in a batch
@@ -885,9 +893,7 @@ 

  

          FakeModuleBuilder.on_tag_artifacts_cb = on_tag_artifacts_cb

  

-         msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main(msgs, stop)

+         self.run_scheduler(db_session)

  

          for c in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

              # perl-Tangerine is expected to fail as configured in on_build_cb.
@@ -917,7 +923,8 @@ 

          return_value=1,

      )

      def test_all_builds_in_batch_fail(

-         self, conf_num_concurrent_builds, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

+         self, conf_num_concurrent_builds, mocked_scm, mocked_get_user,

+         conf_system, dbg, hmsc, db_session

      ):

          """

          Tests that if the build in batch fails, other components in a batch
@@ -946,9 +953,7 @@ 

  

          FakeModuleBuilder.on_build_cb = on_build_cb

  

-         msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main(msgs, stop)

+         self.run_scheduler(db_session)

  

          for c in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

              # perl-Tangerine is expected to fail as configured in on_build_cb.
@@ -971,7 +976,9 @@ 

  

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

-     def test_submit_build_reuse_all(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):

+     def test_submit_build_reuse_all(

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+     ):

          """

          Tests that we do not try building module-build-macros when reusing all

          components in a module build.
@@ -1011,9 +1018,7 @@ 

  

          FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

  

-         msgs = [MBSModule("local module build", 3, 1)]

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main(msgs, stop)

+         self.run_scheduler(db_session, msgs=[MBSModule("local module build", 3, 1)])

  

          reused_component_ids = {

              "module-build-macros": None,
@@ -1035,7 +1040,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_reuse_all_without_build_macros(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

      ):

          """

          Tests that we can reuse components even when the reused module does
@@ -1043,10 +1048,14 @@ 

          """

          reuse_component_init_data()

  

-         models.ComponentBuild.query.filter_by(package="module-build-macros").delete()

-         assert len(models.ComponentBuild.query.filter_by(package="module-build-macros").all()) == 0

+         db_session.query(models.ComponentBuild).filter_by(package="module-build-macros").delete()

+         assert (

+             0 == db_session.query(models.ComponentBuild)

+                            .filter_by(package="module-build-macros")

+                            .count()

+         )

  

-         db.session.commit()

+         db_session.commit()

  

          def on_build_cb(cls, artifact_name, source):

              raise ValueError("All components should be reused, not build.")
@@ -1081,13 +1090,11 @@ 

  

          FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

  

-         msgs = [MBSModule("local module build", 3, 1)]

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main(msgs, stop)

+         self.run_scheduler(db_session, msgs=[MBSModule("local module build", 3, 1)])

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.

-         for build in models.ComponentBuild.query.filter_by(module_id=3).all():

+         for build in db_session.query(models.ComponentBuild).filter_by(module_id=3).all():

              assert build.state == koji.BUILD_STATES["COMPLETE"]

              assert build.module_build.state in [

                  models.BUILD_STATES["done"],
@@ -1097,7 +1104,9 @@ 

  

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

-     def test_submit_build_resume(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):

+     def test_submit_build_resume(

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

+     ):

          """

          Tests that resuming the build works even when previous batches

          are already built.
@@ -1207,9 +1216,11 @@ 

          module_build_id = data["id"]

          module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()

          components = (

-             models.ComponentBuild.query.filter_by(module_id=module_build_id, batch=2)

-             .order_by(models.ComponentBuild.id)

-             .all()

+             models.ComponentBuild

+                   .query

+                   .filter_by(module_id=module_build_id, batch=2)

+                   .order_by(models.ComponentBuild.id)

+                   .all()

          )

          # Make sure the build went from failed to wait

          assert module_build.state == models.BUILD_STATES["wait"]
@@ -1219,13 +1230,12 @@ 

          db.session.expire_all()

  

          # Run the backend

-         msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main(msgs, stop)

+         self.run_scheduler(db_session)

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.

-         for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

+         for build in models.ComponentBuild.query.filter_by(

+                 module_id=module_build_id).all():

              assert build.state == koji.BUILD_STATES["COMPLETE"]

              assert build.module_build.state in [

                  models.BUILD_STATES["done"],
@@ -1235,7 +1245,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_resume_recover_orphaned_macros(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

      ):

          """

          Tests that resuming the build works when module-build-macros is orphaned but marked as
@@ -1344,9 +1354,7 @@ 

          db.session.expire_all()

  

          # Run the backend

-         msgs = []

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main(msgs, stop)

+         self.run_scheduler(db_session)

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -1360,14 +1368,13 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_resume_failed_init(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

      ):

          """

          Tests that resuming the build works when the build failed during the init step

          """

          FakeSCM(

              mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

  

          with patch("module_build_service.utils.submit.format_mmd") as mock_format_mmd:

              mock_format_mmd.side_effect = Forbidden("Custom component repositories aren't allowed.")
@@ -1380,7 +1387,7 @@ 

                  }),

              )

              # Run the backend so that it fails in the "init" handler

-             module_build_service.scheduler.main([], stop)

+             self.run_scheduler(db_session)

              cleanup_moksha()

  

          module_build_id = json.loads(rv.data)["id"]
@@ -1417,7 +1424,7 @@ 

          db.session.expire_all()

  

          # Run the backend again

-         module_build_service.scheduler.main([], stop)

+         self.run_scheduler(db_session)

  

          # All components should be built and module itself should be in "done"

          # or "ready" state.
@@ -1432,7 +1439,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_resume_init_fail(

-         self, mocked_scm, mocked_get_user, mock_greenwave, conf_system, dbg, hmsc

+         self, mocked_scm, mocked_get_user, mock_greenwave, conf_system, dbg, hmsc, db_session

      ):

          """

          Tests that resuming the build fails when the build is in init state
@@ -1450,8 +1457,7 @@ 

          )

          assert rv.status_code == 201

          # Run the backend

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main([], stop)

+         self.run_scheduler(db_session)

          # Post again and make sure it fails

          rv2 = self.client.post(

              "/module-build-service/1/module-builds/",
@@ -1480,7 +1486,7 @@ 

          return_value=True,

      )

      def test_submit_scratch_vs_normal(

-         self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

+         self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

      ):

          """

          Tests that submitting a scratch build with the same NSV as a previously
@@ -1504,8 +1510,7 @@ 

          # make sure normal build has expected context without a suffix

          assert module_build.context == "9c690d0e"

          # Run the backend

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main([], stop)

+         self.run_scheduler(db_session)

          # Post again as a scratch build and make sure it succeeds

          post_data["scratch"] = True

          rv2 = self.client.post(post_url, data=json.dumps(post_data))
@@ -1524,7 +1529,7 @@ 

          return_value=True,

      )

      def test_submit_normal_vs_scratch(

-         self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

+         self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

      ):

          """

          Tests that submitting a normal build with the same NSV as a previously
@@ -1549,8 +1554,7 @@ 

          # make sure scratch build has expected context with unique suffix

          assert module_build.context == "9c690d0e_1"

          # Run the backend

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main([], stop)

+         self.run_scheduler(db_session)

          # Post again as a non-scratch build and make sure it succeeds

          post_data["scratch"] = False

          rv2 = self.client.post(post_url, data=json.dumps(post_data))
@@ -1569,7 +1573,7 @@ 

          return_value=True,

      )

      def test_submit_scratch_vs_scratch(

-         self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

+         self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

      ):

          """

          Tests that submitting a scratch build with the same NSV as a previously
@@ -1593,8 +1597,7 @@ 

          # make sure first scratch build has expected context with unique suffix

          assert module_build.context == "9c690d0e_1"

          # Run the backend

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main([], stop)

+         self.run_scheduler(db_session)

          # Post scratch build again and make sure it succeeds

          rv2 = self.client.post(post_url, data=json.dumps(post_data))

          assert rv2.status_code == 201
@@ -1607,7 +1610,7 @@ 

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_build_repo_regen_not_started_batch(

-         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc

+         self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session

      ):

          """

          Tests that if MBS starts a new batch, the concurrent component threshold is met before a
@@ -1633,7 +1636,7 @@ 

          def _stop_condition(message):

              # Stop the backend if the module batch is 2 (where we simulate the concurrent threshold

              # being met). For safety, also stop the backend if the module erroneously completes.

-             module = db.session.query(models.ModuleBuild).get(module_build_id)

+             module = db_session.query(models.ModuleBuild).get(module_build_id)

              return module.batch == 2 or module.state >= models.BUILD_STATES["done"]

  

          with patch(
@@ -1641,14 +1644,14 @@ 

          ) as mock_acct:

              # Once we get to batch 2, then simulate the concurrent threshold being met

              def _at_concurrent_component_threshold(config, session):

-                 return db.session.query(models.ModuleBuild).get(module_build_id).batch == 2

+                 return session.query(models.ModuleBuild).get(module_build_id).batch == 2

  

              mock_acct.side_effect = _at_concurrent_component_threshold

-             module_build_service.scheduler.main([], _stop_condition)

+             self.run_scheduler(db_session, stop_condition=_stop_condition)

  

          # Only module-build-macros should be built

          for build in (

-             db.session.query(models.ComponentBuild).filter_by(module_id=module_build_id).all()

+             db_session.query(models.ComponentBuild).filter_by(module_id=module_build_id).all()

          ):

              if build.package == "module-build-macros":

                  assert build.state == koji.BUILD_STATES["COMPLETE"]
@@ -1658,7 +1661,7 @@ 

  

          # Simulate a random repo regen message that MBS didn't expect

          cleanup_moksha()

-         module = db.session.query(models.ModuleBuild).get(module_build_id)

+         module = db_session.query(models.ModuleBuild).get(module_build_id)

          msgs = [

              module_build_service.messaging.KojiRepoChange(

                  msg_id="a faked internal message", repo_tag=module.koji_tag + "-build"
@@ -1666,16 +1669,16 @@ 

          ]

          db.session.expire_all()

          # Stop after processing the seeded message

-         module_build_service.scheduler.main(msgs, lambda message: True)

+         self.run_scheduler(db_session, msgs, lambda message: True)

          # Make sure the module build didn't fail so that the poller can resume it later

-         module = db.session.query(models.ModuleBuild).get(module_build_id)

+         module = db_session.query(models.ModuleBuild).get(module_build_id)

          assert module.state == models.BUILD_STATES["build"]

  

      @patch("module_build_service.utils.greenwave.Greenwave.check_gating", return_value=True)

      @patch("module_build_service.auth.get_user", return_value=user)

      @patch("module_build_service.scm.SCM")

      def test_submit_br_metadata_only_module(

-         self, mocked_scm, mocked_get_user, mock_greenwave, conf_system, dbg, hmsc

+         self, mocked_scm, mocked_get_user, mock_greenwave, conf_system, dbg, hmsc, db_session

      ):

          """

          Test that when a build is submitted with a buildrequire without a Koji tag,
@@ -1709,17 +1712,16 @@ 

              assert set(dependencies.keys()) == set(["module-f28-build"])

  

          FakeModuleBuilder.on_buildroot_add_repos_cb = on_buildroot_add_repos_cb

-         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-         module_build_service.scheduler.main([], stop)

+         self.run_scheduler(db_session)

  

-         module = db.session.query(models.ModuleBuild).get(module_build_id)

+         module = db_session.query(models.ModuleBuild).get(module_build_id)

          assert module.state == models.BUILD_STATES["ready"]

  

  

  @patch(

      "module_build_service.config.Config.system", new_callable=PropertyMock, return_value="testlocal"

  )

- class TestLocalBuild:

+ class TestLocalBuild(BaseTestBuild):

      def setup_method(self, test_method):

          FakeModuleBuilder.on_build_cb = None

          FakeModuleBuilder.backend = "testlocal"
@@ -1745,45 +1747,43 @@ 

          return_value=path.join(base_dir, "staged_data", "local_builds"),

      )

      def test_submit_build_local_dependency(

-         self, resultsdir, mocked_scm, mocked_get_user, conf_system, hmsc

+         self, resultsdir, mocked_scm, mocked_get_user, conf_system, hmsc, db_session

      ):

          """

          Tests local module build dependency.

          """

-         with app.app_context():

-             module_build_service.utils.load_local_builds(["platform"])

-             FakeSCM(

-                 mocked_scm,

-                 "testmodule",

-                 "testmodule.yaml",

-                 "620ec77321b2ea7b0d67d82992dda3e1d67055b4",

-             )

+         # with app.app_context():

+         module_build_service.utils.load_local_builds(["platform"])

+         FakeSCM(

+             mocked_scm,

+             "testmodule",

+             "testmodule.yaml",

+             "620ec77321b2ea7b0d67d82992dda3e1d67055b4",

+         )

  

-             rv = self.client.post(

-                 "/module-build-service/1/module-builds/",

-                 data=json.dumps({

-                     "branch": "master",

-                     "scmurl": "https://src.stg.fedoraproject.org/modules/"

-                     "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4",

-                 }),

-             )

+         rv = self.client.post(

+             "/module-build-service/1/module-builds/",

+             data=json.dumps({

+                 "branch": "master",

+                 "scmurl": "https://src.stg.fedoraproject.org/modules/"

+                 "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4",

+             }),

+         )

  

-             data = json.loads(rv.data)

-             module_build_id = data["id"]

+         data = json.loads(rv.data)

+         module_build_id = data["id"]

  

-             # Local base-runtime has changed profiles, so we can detect we use

-             # the local one and not the main one.

-             FakeModuleBuilder.DEFAULT_GROUPS = {"srpm-build": set(["bar"]), "build": set(["foo"])}

+         # Local base-runtime has changed profiles, so we can detect we use

+         # the local one and not the main one.

+         FakeModuleBuilder.DEFAULT_GROUPS = {"srpm-build": set(["bar"]), "build": set(["foo"])}

  

-             msgs = []

-             stop = module_build_service.scheduler.make_simple_stop_condition(db.session)

-             module_build_service.scheduler.main(msgs, stop)

+         self.run_scheduler(db_session)

  

-             # All components should be built and module itself should be in "done"

-             # or "ready" state.

-             for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

-                 assert build.state == koji.BUILD_STATES["COMPLETE"]

-                 assert build.module_build.state in [

-                     models.BUILD_STATES["done"],

-                     models.BUILD_STATES["ready"],

-                 ]

+         # All components should be built and module itself should be in "done"

+         # or "ready" state.

+         for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():

+             assert build.state == koji.BUILD_STATES["COMPLETE"]

+             assert build.module_build.state in [

+                 models.BUILD_STATES["done"],

+                 models.BUILD_STATES["ready"],

+             ]

file modified
+17 -17
@@ -39,7 +39,7 @@ 

  import pytest

  from mock import patch, MagicMock

  

- from tests import conf, init_data, reuse_component_init_data, clean_database

+ from tests import conf, init_data, reuse_component_init_data, clean_database, make_module

  

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

  
@@ -853,14 +853,14 @@ 

          with open(self.expected_srpm_file, "w") as f:

              f.write("")

  

-         module_nsvc = dict(

+         self.module_nsvc = dict(

              name="testmodule",

              stream="master",

              version="1",

              context=module_build_service.models.DEFAULT_MODULE_CONTEXT,

          )

  

-         xmd = {

+         self.xmd = {

              "mbs": {

                  "buildrequires": {

                      "modulea": {
@@ -872,14 +872,9 @@ 

                          "ursine_rpms": ["foo-0:1.0-1.fc28", "bar-0:2.0-1.fc28"],

                      },

                  },

-                 "koji_tag": "module-{name}-{stream}-{version}-{context}".format(**module_nsvc),

+                 "koji_tag": "module-{name}-{stream}-{version}-{context}".format(**self.module_nsvc),

              }

          }

-         from tests import make_module

- 

-         self.module_build = make_module(

-             "{name}:{stream}:{version}:{context}".format(**module_nsvc), xmd=xmd

-         )

  

      def teardown_method(self):

          shutil.rmtree(self.tmp_srpm_build_dir)
@@ -887,24 +882,29 @@ 

  

      @patch("tempfile.mkdtemp")

      @patch("module_build_service.builder.KojiModuleBuilder.execute_cmd")

-     def _build_srpm(self, execute_cmd, mkdtemp):

+     def _build_srpm(self, db_session, execute_cmd, mkdtemp):

+         module_build = make_module(

+             db_session,

+             "{name}:{stream}:{version}:{context}".format(**self.module_nsvc),

+             xmd=self.xmd)

+ 

          mkdtemp.return_value = self.tmp_srpm_build_dir

-         return KojiModuleBuilder.get_disttag_srpm("disttag", self.module_build)

+         return KojiModuleBuilder.get_disttag_srpm("disttag", module_build)

  

-     def test_return_srpm_file(self):

-         srpm_file = self._build_srpm()

+     def test_return_srpm_file(self, db_session):

+         srpm_file = self._build_srpm(db_session)

          assert self.expected_srpm_file == srpm_file

  

-     def test_filtered_rpms_are_added(self):

-         self._build_srpm()

+     def test_filtered_rpms_are_added(self, db_session):

+         self._build_srpm(db_session)

  

          with open(self.spec_file, "r") as f:

              content = f.read()

          for nevr in ["baz-devel-0:0.1-6.fc28", "baz-doc-0:0.1-6.fc28"]:

              assert KojiModuleBuilder.format_conflicts_line(nevr) + "\n" in content

  

-     def test_ursine_rpms_are_added(self):

-         self._build_srpm()

+     def test_ursine_rpms_are_added(self, db_session):

+         self._build_srpm(db_session)

  

          with open(self.spec_file, "r") as f:

              content = f.read()

file modified
+40 -36
@@ -7,7 +7,7 @@ 

  

  import kobo.rpmlib

  

- from module_build_service import conf, db

+ from module_build_service import conf

  from module_build_service.models import ModuleBuild, ComponentBuild, make_session

  from module_build_service.builder.MockModuleBuilder import MockModuleBuilder

  from module_build_service.utils import import_fake_base_module, load_mmd_file, mmd_to_str
@@ -24,31 +24,6 @@ 

          shutil.rmtree(self.resultdir)

  

      def _create_module_with_filters(self, session, batch, state):

-         comp_builds = [

-             {

-                 "module_id": 2,

-                 "package": "ed",

-                 "format": "rpms",

-                 "scmurl": (

-                     "https://src.fedoraproject.org/rpms/ed"

-                     "?#01bf8330812fea798671925cc537f2f29b0bd216"

-                 ),

-                 "batch": 2,

-                 "ref": "01bf8330812fea798671925cc537f2f29b0bd216",

-             },

-             {

-                 "module_id": 2,

-                 "package": "mksh",

-                 "format": "rpms",

-                 "scmurl": (

-                     "https://src.fedoraproject.org/rpms/mksh"

-                     "?#f70fd11ddf96bce0e2c64309706c29156b39141d"

-                 ),

-                 "batch": 3,

-                 "ref": "f70fd11ddf96bce0e2c64309706c29156b39141d",

-             },

-         ]

- 

          base_dir = os.path.abspath(os.path.dirname(__file__))

          mmd = load_mmd_file(

              os.path.join(base_dir, "..", "staged_data", "testmodule-with-filters.yaml"))
@@ -102,11 +77,37 @@ 

          module.koji_tag = "module-mbs-testmodule-test-20171027111452"

          module.batch = batch

          session.add(module)

+         session.commit()

+ 

+         comp_builds = [

+             {

+                 "module_id": module.id,

+                 "state": state,

+                 "package": "ed",

+                 "format": "rpms",

+                 "scmurl": (

+                     "https://src.fedoraproject.org/rpms/ed"

+                     "?#01bf8330812fea798671925cc537f2f29b0bd216"

+                 ),

+                 "batch": 2,

+                 "ref": "01bf8330812fea798671925cc537f2f29b0bd216",

+             },

+             {

+                 "module_id": module.id,

+                 "state": state,

+                 "package": "mksh",

+                 "format": "rpms",

+                 "scmurl": (

+                     "https://src.fedoraproject.org/rpms/mksh"

+                     "?#f70fd11ddf96bce0e2c64309706c29156b39141d"

+                 ),

+                 "batch": 3,

+                 "ref": "f70fd11ddf96bce0e2c64309706c29156b39141d",

+             },

+         ]

  

          for build in comp_builds:

-             cb = ComponentBuild(**dict(build, format="rpms", state=state))

-             session.add(cb)

-             session.commit()

+             session.add(ComponentBuild(**build))

          session.commit()

  

          return module
@@ -186,9 +187,6 @@ 

      def setup_method(self, test_method):

          clean_database(add_platform_module=False)

          import_fake_base_module("platform:f29:1:000000")

-         self.platform = ModuleBuild.get_last_build_in_stream(db.session, "platform", "f29")

-         self.foo = make_module("foo:1:1:1", {"platform": ["f29"]}, {"platform": ["f29"]})

-         self.app = make_module("app:1:1:1", {"platform": ["f29"]}, {"platform": ["f29"]})

  

      @mock.patch("module_build_service.conf.system", new="mock")

      @mock.patch(
@@ -205,19 +203,25 @@ 

          "module_build_service.builder.MockModuleBuilder.MockModuleBuilder._write_mock_config"

      )

      def test_buildroot_add_repos(

-         self, write_config, load_config, patched_open, base_module_repofiles

+         self, write_config, load_config, patched_open, base_module_repofiles, db_session

      ):

+         platform = ModuleBuild.get_last_build_in_stream(db_session, "platform", "f29")

+         foo = make_module(

+             db_session, "foo:1:1:1", {"platform": ["f29"]}, {"platform": ["f29"]})

+         app = make_module(

+             db_session, "app:1:1:1", {"platform": ["f29"]}, {"platform": ["f29"]})

+ 

          patched_open.side_effect = [

              mock.mock_open(read_data="[fake]\nrepofile 1\n").return_value,

              mock.mock_open(read_data="[fake]\nrepofile 2\n").return_value,

              mock.mock_open(read_data="[fake]\nrepofile 3\n").return_value,

          ]

  

-         builder = MockModuleBuilder("user", self.app, conf, "module-app", [])

+         builder = MockModuleBuilder("user", app, conf, "module-app", [])

  

          dependencies = {

-             "repofile://": [self.platform.mmd()],

-             "repofile:///etc/yum.repos.d/foo.repo": [self.foo.mmd(), self.app.mmd()],

+             "repofile://": [platform.mmd()],

+             "repofile:///etc/yum.repos.d/foo.repo": [foo.mmd(), app.mmd()],

          }

  

          builder.buildroot_add_repos(dependencies)

file modified
+18 -8
@@ -20,16 +20,19 @@ 

  import pytest

  from mock import patch, mock_open, ANY

  

- from module_build_service import conf

+ from module_build_service import app, conf

  from module_build_service.manage import retire, build_module_locally

  from module_build_service.models import BUILD_STATES, ModuleBuild, make_session

- from tests.test_models import init_data

+ from tests.test_models import clean_database, init_data

  

  

  class TestMBSManage:

      def setup_method(self, test_method):

          init_data()

  

+     def teardown_method(self, test_method):

+         clean_database(False, False)

+ 

      @pytest.mark.parametrize(

          ("identifier", "is_valid"),

          (
@@ -85,10 +88,10 @@ 

                  session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all()

              )

  

-         assert len(retired_module_builds) == changed_count

-         for x in range(changed_count):

-             assert retired_module_builds[x].id == module_builds[x].id

-             assert retired_module_builds[x].state == BUILD_STATES["garbage"]

+             assert len(retired_module_builds) == changed_count

+             for x in range(changed_count):

+                 assert retired_module_builds[x].id == module_builds[x].id

+                 assert retired_module_builds[x].state == BUILD_STATES["garbage"]

  

      @pytest.mark.parametrize(

          ("confirm_prompt", "confirm_arg", "confirm_expected"),
@@ -130,8 +133,15 @@ 

      @patch("module_build_service.manage.conf.set_item")

      def test_build_module_locally_set_stream(

              self, conf_set_item, main, submit_module_build_from_yaml, patched_open):

-         build_module_locally(

-             yaml_file="./fake.yaml", default_streams=["platform:el8"], stream="foo")

+         # build_module_locally changes database uri to a local SQLite database file.

+         # Restore the uri to original one in order to not impact the database

+         # session in subsequent tests.

+         original_db_uri = app.config['SQLALCHEMY_DATABASE_URI']

+         try:

+             build_module_locally(

+                 yaml_file="./fake.yaml", default_streams=["platform:el8"], stream="foo")

+         finally:

+             app.config['SQLALCHEMY_DATABASE_URI'] = original_db_uri

  

          submit_module_build_from_yaml.assert_called_once_with(

              ANY, ANY, {"default_streams": {"platform": "el8"}, "local_build": True},

@@ -122,9 +122,9 @@ 

          init_data_contexts(contexts=True)

          with make_session(conf) as session:

              builds = ModuleBuild.get_last_build_in_all_streams(session, "nginx")

-             builds = [

+             builds = sorted([

                  "%s:%s:%s" % (build.name, build.stream, str(build.version)) for build in builds

-             ]

+             ])

              assert builds == ["nginx:%d:%d" % (i, i + 2) for i in range(10)]

  

      def test_get_last_build_in_all_stream_last_version(self):
@@ -162,16 +162,25 @@ 

          name:stream_ver modules have different versions.

          """

          clean_database(False)

-         make_module("platform:f29.1.0:10:old_version", {}, {}, virtual_streams=["f29"])

-         make_module("platform:f29.1.0:15:c11.another", {}, {}, virtual_streams=["f29"])

-         make_module("platform:f29.1.0:15:c11", {}, {}, virtual_streams=["f29"])

-         make_module("platform:f29.2.0:0:old_version", {}, {}, virtual_streams=["f29"])

-         make_module("platform:f29.2.0:1:c11", {}, {}, virtual_streams=["f29"])

-         make_module("platform:f29.3.0:15:old_version", {}, {}, virtual_streams=["f29"])

-         make_module("platform:f29.3.0:20:c11", {}, {}, virtual_streams=["f29"])

  

-         with make_session(conf) as session:

-             builds = ModuleBuild.get_last_builds_in_stream_version_lte(session, "platform", 290200)

+         with make_session(conf) as db_session:

+             make_module(

+                 db_session, "platform:f29.1.0:10:old_version", {}, {}, virtual_streams=["f29"])

+             make_module(

+                 db_session, "platform:f29.1.0:15:c11.another", {}, {}, virtual_streams=["f29"])

+             make_module(

+                 db_session, "platform:f29.1.0:15:c11", {}, {}, virtual_streams=["f29"])

+             make_module(

+                 db_session, "platform:f29.2.0:0:old_version", {}, {}, virtual_streams=["f29"])

+             make_module(

+                 db_session, "platform:f29.2.0:1:c11", {}, {}, virtual_streams=["f29"])

+             make_module(

+                 db_session, "platform:f29.3.0:15:old_version", {}, {}, virtual_streams=["f29"])

+             make_module(

+                 db_session, "platform:f29.3.0:20:c11", {}, {}, virtual_streams=["f29"])

+ 

+             builds = ModuleBuild.get_last_builds_in_stream_version_lte(

+                 db_session, "platform", 290200)

              builds = set([

                  "%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)

                  for build in builds
@@ -184,21 +193,25 @@ 

  

      def test_get_module_count(self):

          clean_database(False)

-         make_module("platform:f29.1.0:10:c11", {}, {})

-         make_module("platform:f29.1.0:10:c12", {}, {})

-         with make_session(conf) as session:

-             count = ModuleBuild.get_module_count(session, name="platform")

+         with make_session(conf) as db_session:

+             make_module(db_session, "platform:f29.1.0:10:c11", {}, {})

+             make_module(db_session, "platform:f29.1.0:10:c12", {}, {})

+ 

+             count = ModuleBuild.get_module_count(db_session, name="platform")

              assert count == 2

  

      def test_add_virtual_streams_filter(self):

          clean_database(False)

-         make_module("platform:f29.1.0:10:c1", {}, {}, virtual_streams=["f29"])

-         make_module("platform:f29.1.0:15:c1", {}, {}, virtual_streams=["f29"])

-         make_module("platform:f29.3.0:15:old_version", {}, {}, virtual_streams=["f28", "f29"])

-         make_module("platform:f29.3.0:20:c11", {}, {}, virtual_streams=["f30"])

  

-         with make_session(conf) as session:

-             query = session.query(ModuleBuild).filter_by(name="platform")

-             query = ModuleBuild._add_virtual_streams_filter(session, query, ["f28", "f29"])

+         with make_session(conf) as db_session:

+             make_module(db_session, "platform:f29.1.0:10:c1", {}, {}, virtual_streams=["f29"])

+             make_module(db_session, "platform:f29.1.0:15:c1", {}, {}, virtual_streams=["f29"])

+             make_module(

+                 db_session, "platform:f29.3.0:15:old_version", {}, {},

+                 virtual_streams=["f28", "f29"])

+             make_module(db_session, "platform:f29.3.0:20:c11", {}, {}, virtual_streams=["f30"])

+ 

+             query = db_session.query(ModuleBuild).filter_by(name="platform")

+             query = ModuleBuild._add_virtual_streams_filter(db_session, query, ["f28", "f29"])

              count = query.count()

              assert count == 3

file modified
+10 -8
@@ -71,10 +71,11 @@ 

  @mock.patch("module_build_service.monitor.builder_success_counter.inc")

  def test_monitor_state_changing_success(succ_cnt, failed_cnt):

      conf = mbs_config.Config(TestConfiguration)

-     b = make_module("pkg:0.1:1:c1", requires_list={"platform": "el8"})

-     b.transition(conf, models.BUILD_STATES["wait"])

-     b.transition(conf, models.BUILD_STATES["build"])

-     b.transition(conf, models.BUILD_STATES["done"])

+     with models.make_session(conf) as db_session:

+         b = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

+         b.transition(conf, models.BUILD_STATES["wait"])

+         b.transition(conf, models.BUILD_STATES["build"])

+         b.transition(conf, models.BUILD_STATES["done"])

      succ_cnt.assert_called_once()

      failed_cnt.assert_not_called()

  
@@ -84,9 +85,10 @@ 

  def test_monitor_state_changing_failure(succ_cnt, failed_cnt):

      failure_type = "user"

      conf = mbs_config.Config(TestConfiguration)

-     b = make_module("pkg:0.1:1:c1", requires_list={"platform": "el8"})

-     b.transition(conf, models.BUILD_STATES["wait"])

-     b.transition(conf, models.BUILD_STATES["build"])

-     b.transition(conf, models.BUILD_STATES["failed"], failure_type=failure_type)

+     with models.make_session(conf) as db_session:

+         b = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

+         b.transition(conf, models.BUILD_STATES["wait"])

+         b.transition(conf, models.BUILD_STATES["build"])

+         b.transition(conf, models.BUILD_STATES["failed"], failure_type=failure_type)

      succ_cnt.assert_not_called()

      failed_cnt.assert_called_once_with(reason=failure_type)

file modified
+33 -26
@@ -27,7 +27,7 @@ 

  import pytest

  

  import module_build_service.resolver as mbs_resolver

- from module_build_service import app, db, models, utils, Modulemd

+ from module_build_service import app, conf, db, models, utils, Modulemd

  from module_build_service.utils import import_mmd, load_mmd_file, mmd_to_str

  from module_build_service.models import ModuleBuild

  import tests
@@ -43,33 +43,37 @@ 

      def test_get_buildrequired_modulemds(self):

          mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml"))

          mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")

-         import_mmd(db.session, mmd)

-         platform_f300103 = ModuleBuild.query.filter_by(stream="f30.1.3").one()

-         mmd = tests.make_module("testmodule:master:20170109091357:123", store_to_db=False)

-         build = ModuleBuild(

-             name="testmodule",

-             stream="master",

-             version=20170109091357,

-             state=5,

-             build_context="dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3",

-             runtime_context="ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7",

-             context="7c29193d",

-             koji_tag="module-testmodule-master-20170109091357-7c29193d",

-             scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",

-             batch=3,

-             owner="Dr. Pepper",

-             time_submitted=datetime(2018, 11, 15, 16, 8, 18),

-             time_modified=datetime(2018, 11, 15, 16, 19, 35),

-             rebuild_strategy="changed-and-after",

-             modulemd=mmd_to_str(mmd),

-         )

-         build.buildrequires.append(platform_f300103)

-         db.session.add(build)

-         db.session.commit()

+         with models.make_session(conf) as db_session:

+             import_mmd(db_session, mmd)

+             platform_f300103 = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()

+             mmd = tests.make_module(db_session,

+                                     "testmodule:master:20170109091357:123",

+                                     store_to_db=False)

+             build = ModuleBuild(

+                 name="testmodule",

+                 stream="master",

+                 version=20170109091357,

+                 state=5,

+                 build_context="dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3",

+                 runtime_context="ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7",

+                 context="7c29193d",

+                 koji_tag="module-testmodule-master-20170109091357-7c29193d",

+                 scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",

+                 batch=3,

+                 owner="Dr. Pepper",

+                 time_submitted=datetime(2018, 11, 15, 16, 8, 18),

+                 time_modified=datetime(2018, 11, 15, 16, 19, 35),

+                 rebuild_strategy="changed-and-after",

+                 modulemd=mmd_to_str(mmd),

+             )

+             build.buildrequires.append(platform_f300103)

+             db_session.add(build)

+             db_session.commit()

+ 

+             platform_nsvc = platform_f300103.mmd().get_nsvc()

  

          resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

-         result = resolver.get_buildrequired_modulemds(

-             "testmodule", "master", platform_f300103.mmd().get_nsvc())

+         result = resolver.get_buildrequired_modulemds("testmodule", "master", platform_nsvc)

          nsvcs = set([m.get_nsvc() for m in result])

          assert nsvcs == set(["testmodule:master:20170109091357:123"])

  
@@ -135,6 +139,9 @@ 

          module.version = str(mmd.get_version())

          module.koji_tag = "module-ae2adf69caf0e1b6"

  

+         db.session.add(module)

+         db.session.commit()

+ 

          resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

          result = resolver.get_module_build_dependencies(

              "testmodule2", "master", "20180123171545", "c40c156c").keys()

file modified
+25 -23
@@ -24,6 +24,7 @@ 

  

  import module_build_service.resolver as mbs_resolver

  import module_build_service.utils

+ from module_build_service import conf, models

  from module_build_service.utils.general import mmd_to_str

  import module_build_service.models

  import tests
@@ -377,29 +378,30 @@ 

  

          with patch.object(resolver, "session") as session:

              session.get.return_value = Mock(ok=True)

-             session.get.return_value.json.return_value = {

-                 "items": [

-                     {

-                         "name": "nodejs",

-                         "stream": "10",

-                         "version": 1,

-                         "context": "c1",

-                         "modulemd": mmd_to_str(

-                             tests.make_module("nodejs:10:1:c1", store_to_db=False),

-                         ),

-                     },

-                     {

-                         "name": "nodejs",

-                         "stream": "10",

-                         "version": 2,

-                         "context": "c1",

-                         "modulemd": mmd_to_str(

-                             tests.make_module("nodejs:10:2:c1", store_to_db=False),

-                         ),

-                     },

-                 ],

-                 "meta": {"next": None},

-             }

+             with models.make_session(conf) as db_session:

+                 session.get.return_value.json.return_value = {

+                     "items": [

+                         {

+                             "name": "nodejs",

+                             "stream": "10",

+                             "version": 1,

+                             "context": "c1",

+                             "modulemd": mmd_to_str(

+                                 tests.make_module(db_session, "nodejs:10:1:c1", store_to_db=False),

+                             ),

+                         },

+                         {

+                             "name": "nodejs",

+                             "stream": "10",

+                             "version": 2,

+                             "context": "c1",

+                             "modulemd": mmd_to_str(

+                                 tests.make_module(db_session, "nodejs:10:2:c1", store_to_db=False),

+                             ),

+                         },

+                     ],

+                     "meta": {"next": None},

+                 }

  

              result = resolver.get_buildrequired_modulemds("nodejs", "10", "platform:el8:1:00000000")

  

@@ -121,10 +121,11 @@ 

          clean_database()

  

          # This build should be queried and transformed to ready state

-         module_build = make_module("pkg:0.1:1:c1", requires_list={"platform": "el8"})

+         module_build = make_module(db.session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

          module_build.transition(

              conf, BUILD_STATES["done"], "Move to done directly for running test."

          )

+         db.session.commit()

  

          # Assert this call below

          first_publish_call = call(
@@ -134,8 +135,6 @@ 

              conf=conf,

          )

  

-         db.session.refresh(module_build)

- 

          ClientSession.return_value.getBuild.return_value = {

              "extra": {"typeinfo": {"module": {"module_build_service_id": module_build.id}}}

          }

@@ -27,7 +27,7 @@ 

  import os

  import koji

  import pytest

- from tests import conf, db, app, scheduler_init_data

+ from tests import conf, db, scheduler_init_data

  import module_build_service.resolver

  from module_build_service import build_logs, Modulemd

  from module_build_service.utils.general import load_mmd_file
@@ -94,47 +94,46 @@ 

      @patch("module_build_service.resolver.DBResolver")

      @patch("module_build_service.resolver.GenericResolver")

      def test_new_repo_called_when_macros_reused(

-         self, generic_resolver, resolver, create_builder, dbg

+         self, generic_resolver, resolver, create_builder, dbg, db_session

      ):

          """

          Test that newRepo is called when module-build-macros build is reused.

          """

-         with app.app_context():

-             scheduler_init_data()

-             koji_session = mock.MagicMock()

-             koji_session.newRepo.return_value = 123456

+         scheduler_init_data(db_session)

+         koji_session = mock.MagicMock()

+         koji_session.newRepo.return_value = 123456

  

-             builder = mock.MagicMock()

-             builder.koji_session = koji_session

-             builder.module_build_tag = {"name": "module-123-build"}

-             builder.get_disttag_srpm.return_value = "some srpm disttag"

-             builder.build.return_value = (

-                 1234,

-                 koji.BUILD_STATES["COMPLETE"],

-                 "",

-                 "module-build-macros-1-1",

-             )

-             create_builder.return_value = builder

+         builder = mock.MagicMock()

+         builder.koji_session = koji_session

+         builder.module_build_tag = {"name": "module-123-build"}

+         builder.get_disttag_srpm.return_value = "some srpm disttag"

+         builder.build.return_value = (

+             1234,

+             koji.BUILD_STATES["COMPLETE"],

+             "",

+             "module-build-macros-1-1",

+         )

+         create_builder.return_value = builder

  

-             resolver = mock.MagicMock()

-             resolver.backend = "db"

-             resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

+         resolver = mock.MagicMock()

+         resolver.backend = "db"

+         resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

  

-             with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-                 msg = module_build_service.messaging.MBSModule(

-                     msg_id=None, module_build_id=2, module_build_state="some state")

-                 module_build_service.scheduler.handlers.modules.wait(

-                     config=conf, session=db.session, msg=msg)

-                 koji_session.newRepo.assert_called_once_with("module-123-build")

+         with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

+             msg = module_build_service.messaging.MBSModule(

+                 msg_id=None, module_build_id=2, module_build_state="some state")

+             module_build_service.scheduler.handlers.modules.wait(

+                 config=conf, session=db_session, msg=msg)

+             koji_session.newRepo.assert_called_once_with("module-123-build")

  

-             # When module-build-macros is reused, it still has to appear only

-             # once in database.

-             builds_count = (

-                 db.session.query(ComponentBuild)

-                 .filter_by(package="module-build-macros", module_id=2)

-                 .count()

-             )

-             assert builds_count == 1

+         # When module-build-macros is reused, it still has to appear only

+         # once in database.

+         builds_count = (

+             db.session.query(ComponentBuild)

+             .filter_by(package="module-build-macros", module_id=2)

+             .count()

+         )

+         assert builds_count == 1

  

      @patch(

          "module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -144,38 +143,37 @@ 

      @patch("module_build_service.resolver.DBResolver")

      @patch("module_build_service.resolver.GenericResolver")

      def test_new_repo_not_called_when_macros_not_reused(

-         self, generic_resolver, resolver, create_builder, dbg

+         self, generic_resolver, resolver, create_builder, dbg, db_session

      ):

          """

          Test that newRepo is called everytime for module-build-macros

          """

-         with app.app_context():

-             scheduler_init_data()

-             koji_session = mock.MagicMock()

-             koji_session.newRepo.return_value = 123456

+         scheduler_init_data(db_session)

+         koji_session = mock.MagicMock()

+         koji_session.newRepo.return_value = 123456

  

-             builder = mock.MagicMock()

-             builder.koji_session = koji_session

-             builder.module_build_tag = {"name": "module-123-build"}

-             builder.get_disttag_srpm.return_value = "some srpm disttag"

-             builder.build.return_value = (

-                 1234,

-                 koji.BUILD_STATES["BUILDING"],

-                 "",

-                 "module-build-macros-1-1",

-             )

-             create_builder.return_value = builder

+         builder = mock.MagicMock()

+         builder.koji_session = koji_session

+         builder.module_build_tag = {"name": "module-123-build"}

+         builder.get_disttag_srpm.return_value = "some srpm disttag"

+         builder.build.return_value = (

+             1234,

+             koji.BUILD_STATES["BUILDING"],

+             "",

+             "module-build-macros-1-1",

+         )

+         create_builder.return_value = builder

  

-             resolver = mock.MagicMock()

-             resolver.backend = "db"

-             resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

+         resolver = mock.MagicMock()

+         resolver.backend = "db"

+         resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

  

-             with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-                 msg = module_build_service.messaging.MBSModule(

-                     msg_id=None, module_build_id=2, module_build_state="some state")

-                 module_build_service.scheduler.handlers.modules.wait(

-                     config=conf, session=db.session, msg=msg)

-                 assert koji_session.newRepo.called

+         with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

+             msg = module_build_service.messaging.MBSModule(

+                 msg_id=None, module_build_id=2, module_build_state="some state")

+             module_build_service.scheduler.handlers.modules.wait(

+                 config=conf, session=db_session, msg=msg)

+             assert koji_session.newRepo.called

  

      @patch(

          "module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -185,44 +183,43 @@ 

      @patch("module_build_service.resolver.DBResolver")

      @patch("module_build_service.resolver.GenericResolver")

      def test_set_cg_build_koji_tag_fallback_to_default(

-         self, generic_resolver, resolver, create_builder, dbg

+         self, generic_resolver, resolver, create_builder, dbg, db_session

      ):

          """

          Test that build.cg_build_koji_tag fallbacks to default tag.

          """

-         with app.app_context():

-             base_mmd = Modulemd.ModuleStreamV2.new("base-runtime", "f27")

+         base_mmd = Modulemd.ModuleStreamV2.new("base-runtime", "f27")

  

-             scheduler_init_data()

-             koji_session = mock.MagicMock()

-             koji_session.newRepo.return_value = 123456

+         scheduler_init_data(db_session)

+         koji_session = mock.MagicMock()

+         koji_session.newRepo.return_value = 123456

  

-             builder = mock.MagicMock()

-             builder.koji_session = koji_session

-             builder.module_build_tag = {"name": "module-123-build"}

-             builder.get_disttag_srpm.return_value = "some srpm disttag"

-             builder.build.return_value = (

-                 1234,

-                 koji.BUILD_STATES["BUILDING"],

-                 "",

-                 "module-build-macros-1-1",

-             )

-             create_builder.return_value = builder

+         builder = mock.MagicMock()

+         builder.koji_session = koji_session

+         builder.module_build_tag = {"name": "module-123-build"}

+         builder.get_disttag_srpm.return_value = "some srpm disttag"

+         builder.build.return_value = (

+             1234,

+             koji.BUILD_STATES["BUILDING"],

+             "",

+             "module-build-macros-1-1",

+         )

+         create_builder.return_value = builder

  

-             resolver = mock.MagicMock()

-             resolver.backend = "db"

-             resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

-             resolver.get_module_build_dependencies.return_value = {

-                 "module-bootstrap-tag": [base_mmd]

-             }

+         resolver = mock.MagicMock()

+         resolver.backend = "db"

+         resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

+         resolver.get_module_build_dependencies.return_value = {

+             "module-bootstrap-tag": [base_mmd]

+         }

  

-             with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-                 msg = module_build_service.messaging.MBSModule(

-                     msg_id=None, module_build_id=2, module_build_state="some state")

-                 module_build_service.scheduler.handlers.modules.wait(

-                     config=conf, session=db.session, msg=msg)

-                 module_build = ModuleBuild.query.filter_by(id=2).one()

-                 assert module_build.cg_build_koji_tag == "modular-updates-candidate"

+         with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

+             msg = module_build_service.messaging.MBSModule(

+                 msg_id=None, module_build_id=2, module_build_state="some state")

+             module_build_service.scheduler.handlers.modules.wait(

+                 config=conf, session=db_session, msg=msg)

+             module_build = ModuleBuild.query.filter_by(id=2).one()

+             assert module_build.cg_build_koji_tag == "modular-updates-candidate"

  

      @pytest.mark.parametrize(

          "koji_cg_tag_build,expected_cg_koji_build_tag",
@@ -252,47 +249,47 @@ 

          dbg,

          koji_cg_tag_build,

          expected_cg_koji_build_tag,

+         db_session,

      ):

          """

          Test that build.cg_build_koji_tag is set.

          """

-         with app.app_context():

-             base_mmd = Modulemd.ModuleStreamV2.new("base-runtime", "f27")

+         base_mmd = Modulemd.ModuleStreamV2.new("base-runtime", "f27")

  

-             scheduler_init_data()

-             koji_session = mock.MagicMock()

-             koji_session.newRepo.return_value = 123456

+         scheduler_init_data(db_session)

+         koji_session = mock.MagicMock()

+         koji_session.newRepo.return_value = 123456

  

-             builder = mock.MagicMock()

-             builder.koji_session = koji_session

-             builder.module_build_tag = {"name": "module-123-build"}

-             builder.get_disttag_srpm.return_value = "some srpm disttag"

-             builder.build.return_value = (

-                 1234,

-                 koji.BUILD_STATES["BUILDING"],

-                 "",

-                 "module-build-macros-1-1",

-             )

-             create_builder.return_value = builder

+         builder = mock.MagicMock()

+         builder.koji_session = koji_session

+         builder.module_build_tag = {"name": "module-123-build"}

+         builder.get_disttag_srpm.return_value = "some srpm disttag"

+         builder.build.return_value = (

+             1234,

+             koji.BUILD_STATES["BUILDING"],

+             "",

+             "module-build-macros-1-1",

+         )

+         create_builder.return_value = builder

  

-             resolver = mock.MagicMock()

-             resolver.backend = "db"

-             resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

-             resolver.get_module_build_dependencies.return_value = {

-                 "module-bootstrap-tag": [base_mmd]

-             }

+         resolver = mock.MagicMock()

+         resolver.backend = "db"

+         resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"

+         resolver.get_module_build_dependencies.return_value = {

+             "module-bootstrap-tag": [base_mmd]

+         }

  

-             with patch.object(

-                 module_build_service.scheduler.handlers.modules.conf,

-                 "koji_cg_tag_build",

-                 new=koji_cg_tag_build,

-             ):

-                 with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

-                     msg = module_build_service.messaging.MBSModule(

-                         msg_id=None, module_build_id=2, module_build_state="some state"

-                     )

-                     module_build_service.scheduler.handlers.modules.wait(

-                         config=conf, session=db.session, msg=msg

-                     )

-                     module_build = ModuleBuild.query.filter_by(id=2).one()

-                     assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag

+         with patch.object(

+             module_build_service.scheduler.handlers.modules.conf,

+             "koji_cg_tag_build",

+             new=koji_cg_tag_build,

+         ):

+             with patch.object(module_build_service.resolver, "system_resolver", new=resolver):

+                 msg = module_build_service.messaging.MBSModule(

+                     msg_id=None, module_build_id=2, module_build_state="some state"

+                 )

+                 module_build_service.scheduler.handlers.modules.wait(

+                     config=conf, session=db_session, msg=msg

+                 )

+                 module_build = ModuleBuild.query.filter_by(id=2).one()

+                 assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag

@@ -25,20 +25,21 @@ 

  import module_build_service.messaging

  import module_build_service.scheduler.handlers.repos

  import module_build_service.models

- from tests import conf, db, app, scheduler_init_data

+ from tests import conf, db, scheduler_init_data

  

  

  class TestRepoDone:

+ 

      @mock.patch("module_build_service.models.ModuleBuild.from_repo_done_event")

-     def test_no_match(self, from_repo_done_event):

+     def test_no_match(self, from_repo_done_event, db_session):

          """ Test that when a repo msg hits us and we have no match,

          that we do nothing gracefully.

          """

-         scheduler_init_data()

+         scheduler_init_data(db_session)

          from_repo_done_event.return_value = None

          msg = module_build_service.messaging.KojiRepoChange(

              "no matches for this...", "2016-some-nonexistent-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)

  

      @mock.patch(

          "module_build_service.builder.KojiModuleBuilder."
@@ -65,17 +66,17 @@ 

          "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect"

      )

      def test_a_single_match(

-         self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea

+         self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea, db_session

      ):

          """ Test that when a repo msg hits us and we have a single match.

          """

-         scheduler_init_data()

+         scheduler_init_data(db_session)

          get_session.return_value = mock.Mock(), "development"

          build_fn.return_value = 1234, 1, "", None

  

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)

          build_fn.assert_called_once_with(

              artifact_name="tangerine",

              source=(
@@ -110,11 +111,12 @@ 

          "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect"

      )

      def test_a_single_match_finalize(

-         self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea, finalizer

+         self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea, finalizer,

+         db_session

      ):

          """ Test that when a repo msg hits us and we have a single match.

          """

-         scheduler_init_data(tangerine_state=1)

+         scheduler_init_data(db_session, tangerine_state=1)

          get_session.return_value = mock.Mock(), "development"

          build_fn.return_value = 1234, 1, "", None

  
@@ -128,14 +130,14 @@ 

              # Check that the time_completed is set in the time when

              # finalizer is called.

              assert succeeded is True

-             module_build = module_build_service.models.ModuleBuild.query.get(2)

+             module_build = db_session.query(module_build_service.models.ModuleBuild).get(2)

              assert module_build.time_completed is not None

  

          finalizer.side_effect = mocked_finalizer

  

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)

  

          finalizer.assert_called_once()

  
@@ -164,18 +166,18 @@ 

          "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect"

      )

      def test_a_single_match_build_fail(

-         self, connect, build_fn, config, ready, list_tasks_fn, mock_gabt, mock_uea

+         self, connect, build_fn, config, ready, list_tasks_fn, mock_gabt, mock_uea, db_session

      ):

          """ Test that when a KojiModuleBuilder.build fails, the build is

          marked as failed with proper state_reason.

          """

-         scheduler_init_data()

+         scheduler_init_data(db_session)

          config.return_value = mock.Mock(), "development"

          build_fn.return_value = None, 4, "Failed to submit artifact tangerine to Koji", None

  

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-         module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)

          build_fn.assert_called_once_with(

              artifact_name="tangerine",

              source=(
@@ -183,16 +185,17 @@ 

                  "#fbed359411a1baa08d4a88e0d12d426fbf8f602c"

              ),

          )

-         component_build = (

-             module_build_service.models.ComponentBuild.query.filter_by(package="tangerine").one())

+         component_build = db_session.query(

+             module_build_service.models.ComponentBuild

+         ).filter_by(package="tangerine").one()

          assert component_build.state_reason == "Failed to submit artifact tangerine to Koji"

  

      @mock.patch("module_build_service.scheduler.handlers.repos.log.info")

-     def test_erroneous_regen_repo_received(self, mock_log_info):

+     def test_erroneous_regen_repo_received(self, mock_log_info, db_session):

          """ Test that when an unexpected KojiRepoRegen message is received, the module doesn't

          complete or go to the next build batch.

          """

-         scheduler_init_data(1)

+         scheduler_init_data(db_session, 1)

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

          component_build = (
@@ -200,7 +203,7 @@ 

          component_build.tagged = False

          db.session.add(component_build)

          db.session.commit()

-         module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg)

+         module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)

          mock_log_info.assert_called_with(

              "Ignoring repo regen, because not all components are tagged."

          )
@@ -226,19 +229,19 @@ 

          "module_build_service.builder.GenericBuilder.default_buildroot_groups",

          return_value={"build": [], "srpm-build": []},

      )

-     def test_failed_component_build(self, dbg, connect, build_fn, config, ready, list_tasks_fn):

+     def test_failed_component_build(

+         self, dbg, connect, build_fn, config, ready, list_tasks_fn, db_session

+     ):

          """ Test that when a KojiModuleBuilder.build fails, the build is

          marked as failed with proper state_reason.

          """

-         with app.app_context():

-             scheduler_init_data(3)

-             config.return_value = mock.Mock(), "development"

-             build_fn.return_value = None, 4, "Failed to submit artifact x to Koji", None

- 

-             msg = module_build_service.messaging.KojiRepoChange(

-                 "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

-             module_build_service.scheduler.handlers.repos.done(

-                 config=conf, session=db.session, msg=msg)

-             module_build = module_build_service.models.ModuleBuild.query.get(2)

- 

-             assert module_build.state == module_build_service.models.BUILD_STATES["failed"]

+         scheduler_init_data(db_session, 3)

+         config.return_value = mock.Mock(), "development"

+         build_fn.return_value = None, 4, "Failed to submit artifact x to Koji", None

+ 

+         msg = module_build_service.messaging.KojiRepoChange(

+             "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

+         module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)

+         module_build = module_build_service.models.ModuleBuild.query.get(2)

+ 

+         assert module_build.state == module_build_service.models.BUILD_STATES["failed"]

@@ -24,11 +24,17 @@ 

  import json

  from mock import patch, Mock

  import pytest

+ from module_build_service import conf

+ from module_build_service.models import make_session

  from module_build_service.utils.greenwave import greenwave

- from tests import make_module

+ from tests import clean_database, make_module

  

  

  class TestGreenwaveQuery():

+ 

+     def setup_method(self, method):

+         clean_database()

+ 

      @patch("module_build_service.utils.greenwave.requests")

      def test_greenwave_query_decision(self, mock_requests):

          resp_status = 200
@@ -55,9 +61,9 @@ 

          response.status_code = resp_status

          mock_requests.post.return_value = response

  

-         fake_build = make_module("pkg:0.1:1:c1", requires_list={"platform": "el8"})

- 

-         got_response = greenwave.query_decision(fake_build, prod_version="xxxx-8")

+         with make_session(conf) as db_session:

+             fake_build = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

+             got_response = greenwave.query_decision(fake_build, prod_version="xxxx-8")

  

          assert got_response == resp_content

          assert json.loads(mock_requests.post.call_args_list[0][1]["data"]) == {
@@ -173,7 +179,8 @@ 

          mock_requests.get.return_value = responses[0]

          mock_requests.post.side_effect = responses[1:]

  

-         fake_build = make_module("pkg:0.1:1:c1", requires_list={"platform": "el8"})

-         result = greenwave.check_gating(fake_build)

+         with make_session(conf) as db_session:

+             fake_build = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})

+             result = greenwave.check_gating(fake_build)

  

          assert result == policies_satisfied

file modified
+48 -32
@@ -20,6 +20,7 @@ 

  from mock import patch, Mock

  

  from module_build_service import conf

+ from module_build_service.models import make_session

  from module_build_service.utils import ursine

  from tests import make_module, clean_database

  
@@ -172,20 +173,31 @@ 

  

          # Defaults to DB resolver, so create fake module builds and store them

          # into database to ensure they can be queried.

-         mmd_name1s2020c = make_module(

-             "name1:s:2020:c", xmd={"mbs": {"koji_tag": "module-name1-s-2020-c"}})

-         mmd_name2s2021c = make_module(

-             "name2:s:2021:c", xmd={"mbs": {"koji_tag": "module-name2-s-2021-c"}})

- 

-         koji_tag = "tag"  # It's ok to use arbitrary tag name.

-         with patch.object(conf, "koji_external_repo_url_prefix", new="http://example.com/"):

-             modulemds = ursine.get_modulemds_from_ursine_content(koji_tag)

- 

-         test_nsvcs = [item.get_nsvc() for item in modulemds]

-         test_nsvcs.sort()

- 

-         expected_nsvcs = [mmd_name1s2020c.mmd().get_nsvc(), mmd_name2s2021c.mmd().get_nsvc()]

-         expected_nsvcs.sort()

+         #

+         # Switched to call function make_session to create a

+         # SQLAlchemy database session. Calling db.session causes failure to

+         # read attributes from a ModuleBuild object at following line calling

+         # mmd(). The error is ModuleBuild object is not bound to a Session.

+         # From the behavior of following code, the reason of the error is

+         # mixing use of db.session and make_session, the latter one is called

+         # from function ``get_modulemds_from_ursine_content``.

+         with make_session(conf) as db_session:

+             mmd_name1s2020c = make_module(

+                 db_session,

+                 "name1:s:2020:c", xmd={"mbs": {"koji_tag": "module-name1-s-2020-c"}})

+             mmd_name2s2021c = make_module(

+                 db_session,

+                 "name2:s:2021:c", xmd={"mbs": {"koji_tag": "module-name2-s-2021-c"}})

+ 

+             koji_tag = "tag"  # It's ok to use arbitrary tag name.

+             with patch.object(conf, "koji_external_repo_url_prefix", new="http://example.com/"):

+                 modulemds = ursine.get_modulemds_from_ursine_content(koji_tag)

+ 

+             test_nsvcs = [item.get_nsvc() for item in modulemds]

+             test_nsvcs.sort()

+ 

+             expected_nsvcs = [mmd_name1s2020c.mmd().get_nsvc(), mmd_name2s2021c.mmd().get_nsvc()]

+             expected_nsvcs.sort()

  

          session.getExternalRepoList.assert_called_once_with(koji_tag)

          assert expected_nsvcs == test_nsvcs
@@ -197,10 +209,10 @@ 

      @patch.object(conf, "base_module_names", new=["platform"])

      @patch.object(ursine, "find_stream_collision_modules")

      def test_nothing_changed_if_no_base_module_is_in_buildrequires(

-         self, find_stream_collision_modules

+         self, find_stream_collision_modules, db_session

      ):

          xmd = {"mbs": {"buildrequires": {"modulea": {"stream": "master"}}}}

-         fake_mmd = make_module("name1:s:2020:c", xmd=xmd, store_to_db=False)

+         fake_mmd = make_module(db_session, "name1:s:2020:c", xmd=xmd, store_to_db=False)

          original_xmd = fake_mmd.get_xmd()

  

          with patch.object(ursine, "log") as log:
@@ -213,7 +225,7 @@ 

      @patch.object(conf, "base_module_names", new=["platform"])

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

      def test_mark_handled_even_if_no_modules_in_ursine_content(

-         self, get_modulemds_from_ursine_content

+         self, get_modulemds_from_ursine_content, db_session

      ):

          xmd = {

              "mbs": {
@@ -223,7 +235,7 @@ 

                  }

              }

          }

-         fake_mmd = make_module("name1:s:2020:c", xmd=xmd, store_to_db=False)

+         fake_mmd = make_module(db_session, "name1:s:2020:c", xmd=xmd, store_to_db=False)

          expected_xmd = fake_mmd.get_xmd()

  

          get_modulemds_from_ursine_content.return_value = []
@@ -242,7 +254,7 @@ 

      @patch("module_build_service.resolver.GenericResolver.create")

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

      def test_add_collision_modules(

-         self, ClientSession, resolver_create, get_modulemds_from_ursine_content

+         self, ClientSession, resolver_create, get_modulemds_from_ursine_content, db_session

      ):

          xmd = {

              "mbs": {
@@ -258,20 +270,24 @@ 

                  }

              }

          }

-         fake_mmd = make_module("name1:s:2020:c", xmd=xmd, store_to_db=False)

+         fake_mmd = make_module(db_session, "name1:s:2020:c", xmd=xmd, store_to_db=False)

  

          def mock_get_ursine_modulemds(koji_tag):

              if koji_tag == "module-rhel-8.0-build":

                  return [

                      # This is the one

-                     make_module("modulea:10:20180813041838:5ea3b708", store_to_db=False),

-                     make_module("moduleb:1.0:20180113042038:6ea3b105", store_to_db=False),

+                     make_module(

+                         db_session, "modulea:10:20180813041838:5ea3b708", store_to_db=False),

+                     make_module(

+                         db_session, "moduleb:1.0:20180113042038:6ea3b105", store_to_db=False),

                  ]

              if koji_tag == "module-project-1.0-build":

                  return [

                      # Both of them are the collided modules

-                     make_module("bar:6:20181013041838:817fa3a8", store_to_db=False),

-                     make_module("foo:2:20180113041838:95f078a1", store_to_db=False),

+                     make_module(

+                         db_session, "bar:6:20181013041838:817fa3a8", store_to_db=False),

+                     make_module(

+                         db_session, "foo:2:20180113041838:95f078a1", store_to_db=False),

                  ]

  

          get_modulemds_from_ursine_content.side_effect = mock_get_ursine_modulemds
@@ -335,22 +351,22 @@ 

          assert not ursine.find_stream_collision_modules({}, "koji_tag")

  

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

-     def test_no_collisions_found(self, get_modulemds_from_ursine_content):

+     def test_no_collisions_found(self, get_modulemds_from_ursine_content, db_session):

          xmd_mbs_buildrequires = {"modulea": {"stream": "master"}, "moduleb": {"stream": "10"}}

          get_modulemds_from_ursine_content.return_value = [

-             make_module("moduler:1:1:c1", store_to_db=False),

-             make_module("modules:2:1:c2", store_to_db=False),

-             make_module("modulet:3:1:c3", store_to_db=False),

+             make_module(db_session, "moduler:1:1:c1", store_to_db=False),

+             make_module(db_session, "modules:2:1:c2", store_to_db=False),

+             make_module(db_session, "modulet:3:1:c3", store_to_db=False),

          ]

          assert [] == ursine.find_stream_collision_modules(xmd_mbs_buildrequires, "koji_tag")

  

      @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")

-     def test_collision_modules_are_found(self, get_modulemds_from_ursine_content):

+     def test_collision_modules_are_found(self, get_modulemds_from_ursine_content, db_session):

          xmd_mbs_buildrequires = {"modulea": {"stream": "master"}, "moduleb": {"stream": "10"}}

          fake_modules = [

-             make_module("moduler:1:1:c1", store_to_db=False),

-             make_module("moduleb:6:1:c2", store_to_db=False),

-             make_module("modulet:3:1:c3", store_to_db=False),

+             make_module(db_session, "moduler:1:1:c1", store_to_db=False),

+             make_module(db_session, "moduleb:6:1:c2", store_to_db=False),

+             make_module(db_session, "modulet:3:1:c3", store_to_db=False),

          ]

          get_modulemds_from_ursine_content.return_value = fake_modules

  

file modified
+39 -35
@@ -446,26 +446,30 @@ 

          assert release_one == "module+2+b8645bbb"

          assert release_two == "module+2+17e35784"

  

-     def test_get_rpm_release_platform_stream(self):

-         scheduler_init_data(1)

-         build_one = models.ModuleBuild.query.get(2)

+     def test_get_rpm_release_platform_stream(self, db_session):

+         scheduler_init_data(db_session, 1)

+         build_one = db_session.query(models.ModuleBuild).get(2)

          release = module_build_service.utils.get_rpm_release(build_one)

          assert release == "module+f28+2+814cfa39"

  

-     def test_get_rpm_release_platform_stream_override(self):

-         scheduler_init_data(1)

+     def test_get_rpm_release_platform_stream_override(self, db_session):

+         scheduler_init_data(db_session, 1)

  

          # Set the disttag_marking override on the platform

-         platform = models.ModuleBuild.query.filter_by(name="platform", stream="f28").first()

+         platform = (

+             db_session.query(models.ModuleBuild)

+             .filter_by(name="platform", stream="f28")

+             .first()

+         )

          platform_mmd = platform.mmd()

          platform_xmd = platform_mmd.get_xmd()

          platform_xmd["mbs"]["disttag_marking"] = "fedora28"

          platform_mmd.set_xmd(platform_xmd)

          platform.modulemd = mmd_to_str(platform_mmd)

-         db.session.add(platform)

-         db.session.commit()

+         db_session.add(platform)

+         db_session.commit()

  

-         build_one = models.ModuleBuild.query.get(2)

+         build_one = db_session.query(models.ModuleBuild).get(2)

          release = module_build_service.utils.get_rpm_release(build_one)

          assert release == "module+fedora28+2+814cfa39"

  
@@ -474,20 +478,20 @@ 

          new_callable=mock.PropertyMock,

          return_value=["build"],

      )

-     def test_get_rpm_release_metadata_br_stream_override(self, mock_admmn):

+     def test_get_rpm_release_metadata_br_stream_override(self, mock_admmn, db_session):

          """

          Test that when a module buildrequires a module in conf.allowed_privileged_module_names,

          and that module has the xmd.mbs.disttag_marking field set, it should influence the disttag.

          """

-         scheduler_init_data(1)

+         scheduler_init_data(db_session, 1)

          mmd_path = path.abspath(

              path.join(

                  __file__, path.pardir, path.pardir, "staged_data", "build_metadata_module.yaml")

          )

          metadata_mmd = module_build_service.utils.load_mmd_file(mmd_path)

-         module_build_service.utils.import_mmd(db.session, metadata_mmd)

+         module_build_service.utils.import_mmd(db_session, metadata_mmd)

  

-         build_one = models.ModuleBuild.query.get(2)

+         build_one = db_session.query(models.ModuleBuild).get(2)

          mmd = build_one.mmd()

          deps = mmd.get_dependencies()[0]

          deps.add_buildtime_stream("build", "product1.2")
@@ -501,8 +505,8 @@ 

          }

          mmd.set_xmd(xmd)

          build_one.modulemd = mmd_to_str(mmd)

-         db.session.add(build_one)

-         db.session.commit()

+         db_session.add(build_one)

+         db_session.commit()

  

          release = module_build_service.utils.get_rpm_release(build_one)

          assert release == "module+product12+2+814cfa39"
@@ -516,19 +520,19 @@ 

          assert release_one == "scrmod+2+b8645bbb"

          assert release_two == "scrmod+2+17e35784"

  

-     def test_get_rpm_release_platform_stream_scratch(self):

-         scheduler_init_data(1, scratch=True)

-         build_one = models.ModuleBuild.query.get(2)

+     def test_get_rpm_release_platform_stream_scratch(self, db_session):

+         scheduler_init_data(db_session, 1, scratch=True)

+         build_one = db_session.query(models.ModuleBuild).get(2)

          release = module_build_service.utils.get_rpm_release(build_one)

          assert release == "scrmod+f28+2+814cfa39"

  

      @patch("module_build_service.utils.submit.get_build_arches")

-     def test_record_module_build_arches(self, get_build_arches):

+     def test_record_module_build_arches(self, get_build_arches, db_session):

          get_build_arches.return_value = ["x86_64", "i686"]

-         scheduler_init_data(1)

-         build = models.ModuleBuild.query.get(2)

+         scheduler_init_data(db_session, 1)

+         build = db_session.query(models.ModuleBuild).get(2)

          build.arches = []

-         module_build_service.utils.record_module_build_arches(build.mmd(), build, db.session)

+         module_build_service.utils.record_module_build_arches(build.mmd(), build, db_session)

  

          arches = set([arch.name for arch in build.arches])

          assert arches == set(get_build_arches.return_value)
@@ -896,15 +900,15 @@ 

          is_eol = module_build_service.utils.submit._is_eol_in_pdc("mariadb", "10.1")

          assert is_eol

  

-     def test_get_prefixed_version_f28(self):

-         scheduler_init_data(1)

-         build_one = models.ModuleBuild.query.get(2)

+     def test_get_prefixed_version_f28(self, db_session):

+         scheduler_init_data(db_session, 1)

+         build_one = db_session.query(models.ModuleBuild).get(2)

          v = module_build_service.utils.submit.get_prefixed_version(build_one.mmd())

          assert v == 2820180205135154

  

-     def test_get_prefixed_version_fl701(self):

-         scheduler_init_data(1)

-         build_one = models.ModuleBuild.query.get(2)

+     def test_get_prefixed_version_fl701(self, db_session):

+         scheduler_init_data(db_session, 1)

+         build_one = db_session.query(models.ModuleBuild).get(2)

          mmd = build_one.mmd()

          xmd = mmd.get_xmd()

          xmd["mbs"]["buildrequires"]["platform"]["stream"] = "fl7.0.1-beta"
@@ -919,18 +923,19 @@ 

          build adds new MSE build (it means there are new expanded

          buildrequires).

          """

-         build = make_module("foo:stream:0:c1", {}, {})

-         assert build.state == models.BUILD_STATES["ready"]

+         with models.make_session(conf) as db_session:

+             build = make_module(db_session, "foo:stream:0:c1", {}, {})

+             assert build.state == models.BUILD_STATES["ready"]

  

-         mmd1 = build.mmd()

-         mmd2 = build.mmd()

-         mmd2.set_context("c2")

+             mmd1 = build.mmd()

+             mmd2 = build.mmd()

  

+         mmd2.set_context("c2")

          generate_expanded_mmds.return_value = [mmd1, mmd2]

- 

          # Create a copy of mmd1 without xmd.mbs, since that will cause validate_mmd to fail

          mmd1_copy = mmd1.copy()

          mmd1_copy.set_xmd({})

+ 

          builds = module_build_service.utils.submit_module_build("foo", mmd1_copy, {})

          ret = {b.mmd().get_context(): b.state for b in builds}

          assert ret == {"c1": models.BUILD_STATES["ready"], "c2": models.BUILD_STATES["init"]}
@@ -1268,7 +1273,6 @@ 

          # The component was reused when the batch first started

          building_component = module_build.current_batch()[0]

          building_component.state = koji.BUILD_STATES["BUILDING"]

-         building_component.reused_component_id = 123

          db.session.commit()

  

          builder = mock.MagicMock()

@@ -51,28 +51,30 @@ 

          ]

          return nsvcs

  

-     def _generate_default_modules(self):

+     def _generate_default_modules(self, db_session):

          """

          Generates gtk:1, gtk:2, foo:1 and foo:2 modules requiring the

          platform:f28 and platform:f29 modules.

          """

-         platform_f28 = make_module("platform:f28:0:c10", {}, {})

-         platform_f29 = make_module("platform:f29:0:c11", {}, {})

-         make_module("gtk:1:0:c2", {"platform": ["f28"]}, {}, platform_f28)

-         make_module("gtk:1:0:c3", {"platform": ["f29"]}, {}, platform_f29)

-         make_module("gtk:2:0:c4", {"platform": ["f28"]}, {}, platform_f28)

-         make_module("gtk:2:0:c5", {"platform": ["f29"]}, {}, platform_f29)

-         make_module("foo:1:0:c2", {"platform": ["f28"]}, {}, platform_f28)

-         make_module("foo:1:0:c3", {"platform": ["f29"]}, {}, platform_f29)

-         make_module("foo:2:0:c4", {"platform": ["f28"]}, {}, platform_f28)

-         make_module("foo:2:0:c5", {"platform": ["f29"]}, {}, platform_f29)

-         make_module("app:1:0:c6", {"platform": ["f29"]}, {}, platform_f29)

- 

-     def test_generate_expanded_mmds_context(self):

-         self._generate_default_modules()

+         platform_f28 = make_module(db_session, "platform:f28:0:c10", {}, {})

+         platform_f29 = make_module(db_session, "platform:f29:0:c11", {}, {})

+         make_module(db_session, "gtk:1:0:c2", {"platform": ["f28"]}, {}, platform_f28)

+         make_module(db_session, "gtk:1:0:c3", {"platform": ["f29"]}, {}, platform_f29)

+         make_module(db_session, "gtk:2:0:c4", {"platform": ["f28"]}, {}, platform_f28)

+         make_module(db_session, "gtk:2:0:c5", {"platform": ["f29"]}, {}, platform_f29)

+         make_module(db_session, "foo:1:0:c2", {"platform": ["f28"]}, {}, platform_f28)

+         make_module(db_session, "foo:1:0:c3", {"platform": ["f29"]}, {}, platform_f29)

+         make_module(db_session, "foo:2:0:c4", {"platform": ["f28"]}, {}, platform_f28)

+         make_module(db_session, "foo:2:0:c5", {"platform": ["f29"]}, {}, platform_f29)

+         make_module(db_session, "app:1:0:c6", {"platform": ["f29"]}, {}, platform_f29)

+ 

+     def test_generate_expanded_mmds_context(self, db_session):

+         self._generate_default_modules(db_session)

          module_build = make_module(

-             "app:1:0:c1", {"gtk": ["1", "2"]}, {"platform": ["f28"], "gtk": ["1", "2"]})

-         mmds = module_build_service.utils.generate_expanded_mmds(db.session, module_build.mmd())

+             db_session, "app:1:0:c1", {"gtk": ["1", "2"]}, {"platform": ["f28"], "gtk": ["1", "2"]}

+         )

+         mmds = module_build_service.utils.generate_expanded_mmds(

+             db_session, module_build.mmd())

          contexts = set([mmd.get_context() for mmd in mmds])

          assert set(["e1e005fb", "ce132a1e"]) == contexts

  
@@ -153,10 +155,11 @@ 

          ],

      )

      def test_generate_expanded_mmds_buildrequires(

-         self, requires, build_requires, stream_ambigous, expected_xmd, expected_buildrequires

+         self, requires, build_requires, stream_ambigous, expected_xmd, expected_buildrequires,

+         db_session

      ):

-         self._generate_default_modules()

-         module_build = make_module("app:1:0:c1", requires, build_requires)

+         self._generate_default_modules(db_session)

+         module_build = make_module(db_session, "app:1:0:c1", requires, build_requires)

  

          # Check that generate_expanded_mmds raises an exception if stream is ambigous

          # and also that it does not raise an exception otherwise.
@@ -236,10 +239,10 @@ 

              ),

          ],

      )

-     def test_generate_expanded_mmds_requires(self, requires, build_requires, expected):

-         self._generate_default_modules()

-         module_build = make_module("app:1:0:c1", requires, build_requires)

-         mmds = module_build_service.utils.generate_expanded_mmds(db.session, module_build.mmd())

+     def test_generate_expanded_mmds_requires(self, requires, build_requires, expected, db_session):

+         self._generate_default_modules(db_session)

+         module_build = make_module(db_session, "app:1:0:c1", requires, build_requires)

+         mmds = module_build_service.utils.generate_expanded_mmds(db_session, module_build.mmd())

  

          requires_per_mmd = set()

          for mmd in mmds:
@@ -318,28 +321,28 @@ 

              ),

          ],

      )

-     def test_get_required_modules_simple(self, requires, build_requires, expected):

-         module_build = make_module("app:1:0:c1", requires, build_requires)

-         self._generate_default_modules()

+     def test_get_required_modules_simple(self, requires, build_requires, expected, db_session):

+         module_build = make_module(db_session, "app:1:0:c1", requires, build_requires)

+         self._generate_default_modules(db_session)

          nsvcs = self._get_mmds_required_by_module_recursively(module_build)

          assert set(nsvcs) == set(expected)

  

-     def _generate_default_modules_recursion(self):

+     def _generate_default_modules_recursion(self, db_session):

          """

          Generates the gtk:1 module requiring foo:1 module requiring bar:1

          and lorem:1 modules which require base:f29 module requiring

          platform:f29 module :).

          """

-         base_module = make_module("platform:f29:0:c11", {}, {})

-         make_module("gtk:1:0:c2", {"foo": ["unknown"]}, {}, base_module)

-         make_module("gtk:1:1:c2", {"foo": ["1"]}, {}, base_module)

-         make_module("foo:1:0:c2", {"bar": ["unknown"]}, {}, base_module)

-         make_module("foo:1:1:c2", {"bar": ["1"], "lorem": ["1"]}, {}, base_module)

-         make_module("bar:1:0:c2", {"base": ["unknown"]}, {}, base_module)

-         make_module("bar:1:1:c2", {"base": ["f29"]}, {}, base_module)

-         make_module("lorem:1:0:c2", {"base": ["unknown"]}, {}, base_module)

-         make_module("lorem:1:1:c2", {"base": ["f29"]}, {}, base_module)

-         make_module("base:f29:0:c3", {"platform": ["f29"]}, {}, base_module)

+         base_module = make_module(db_session, "platform:f29:0:c11", {}, {})

+         make_module(db_session, "gtk:1:0:c2", {"foo": ["unknown"]}, {}, base_module)

+         make_module(db_session, "gtk:1:1:c2", {"foo": ["1"]}, {}, base_module)

+         make_module(db_session, "foo:1:0:c2", {"bar": ["unknown"]}, {}, base_module)

+         make_module(db_session, "foo:1:1:c2", {"bar": ["1"], "lorem": ["1"]}, {}, base_module)

+         make_module(db_session, "bar:1:0:c2", {"base": ["unknown"]}, {}, base_module)

+         make_module(db_session, "bar:1:1:c2", {"base": ["f29"]}, {}, base_module)

+         make_module(db_session, "lorem:1:0:c2", {"base": ["unknown"]}, {}, base_module)

+         make_module(db_session, "lorem:1:1:c2", {"base": ["f29"]}, {}, base_module)

+         make_module(db_session, "base:f29:0:c3", {"platform": ["f29"]}, {}, base_module)

  

      @pytest.mark.parametrize(

          "requires,build_requires,expected",
@@ -363,25 +366,25 @@ 

              ),

          ],

      )

-     def test_get_required_modules_recursion(self, requires, build_requires, expected):

-         module_build = make_module("app:1:0:c1", requires, build_requires)

-         self._generate_default_modules_recursion()

+     def test_get_required_modules_recursion(self, requires, build_requires, expected, db_session):

+         module_build = make_module(db_session, "app:1:0:c1", requires, build_requires)

+         self._generate_default_modules_recursion(db_session)

          nsvcs = self._get_mmds_required_by_module_recursively(module_build)

          assert set(nsvcs) == set(expected)

  

-     def _generate_default_modules_modules_multiple_stream_versions(self):

+     def _generate_default_modules_modules_multiple_stream_versions(self, db_session):

          """

          Generates the gtk:1 module requiring foo:1 module requiring bar:1

          and lorem:1 modules which require base:f29 module requiring

          platform:f29 module :).

          """

-         f290000 = make_module("platform:f29.0.0:0:c11", {}, {}, virtual_streams=["f29"])

-         f290100 = make_module("platform:f29.1.0:0:c11", {}, {}, virtual_streams=["f29"])

-         f290200 = make_module("platform:f29.2.0:0:c11", {}, {}, virtual_streams=["f29"])

-         make_module("gtk:1:0:c2", {"platform": ["f29"]}, {}, f290000)

-         make_module("gtk:1:1:c2", {"platform": ["f29"]}, {}, f290100)

-         make_module("gtk:1:2:c2", {"platform": ["f29"]}, {}, f290100)

-         make_module("gtk:1:3:c2", {"platform": ["f29"]}, {}, f290200)

+         f290000 = make_module(db_session, "platform:f29.0.0:0:c11", {}, {}, virtual_streams=["f29"])

+         f290100 = make_module(db_session, "platform:f29.1.0:0:c11", {}, {}, virtual_streams=["f29"])

+         f290200 = make_module(db_session, "platform:f29.2.0:0:c11", {}, {}, virtual_streams=["f29"])

+         make_module(db_session, "gtk:1:0:c2", {"platform": ["f29"]}, {}, f290000)

+         make_module(db_session, "gtk:1:1:c2", {"platform": ["f29"]}, {}, f290100)

+         make_module(db_session, "gtk:1:2:c2", {"platform": ["f29"]}, {}, f290100)

+         make_module(db_session, "gtk:1:3:c2", {"platform": ["f29"]}, {}, f290200)

  

      @pytest.mark.parametrize(

          "requires,build_requires,expected",
@@ -393,9 +396,11 @@ 

              )

          ],

      )

-     def test_get_required_modules_stream_versions(self, requires, build_requires, expected):

-         module_build = make_module("app:1:0:c1", requires, build_requires)

-         self._generate_default_modules_modules_multiple_stream_versions()

+     def test_get_required_modules_stream_versions(

+         self, requires, build_requires, expected, db_session

+     ):

+         module_build = make_module(db_session, "app:1:0:c1", requires, build_requires)

+         self._generate_default_modules_modules_multiple_stream_versions(db_session)

          nsvcs = self._get_mmds_required_by_module_recursively(module_build)

          assert set(nsvcs) == set(expected)

  
@@ -424,7 +429,7 @@ 

          assert actual == expected

  

      @pytest.mark.parametrize("virtual_streams", (None, ["f29"], ["lp29"]))

-     def test__get_base_module_mmds_virtual_streams(self, virtual_streams):

+     def test__get_base_module_mmds_virtual_streams(self, virtual_streams, db_session):

          """Ensure the correct results are returned without duplicates."""

          init_data(data_size=1, multiple_stream_versions=True)

          mmd = module_build_service.utils.load_mmd_file(
@@ -437,7 +442,7 @@ 

          mmd.remove_dependencies(deps)

          mmd.add_dependencies(new_deps)

  

-         make_module("platform:lp29.1.1:12:c11", {}, {}, virtual_streams=virtual_streams)

+         make_module(db_session, "platform:lp29.1.1:12:c11", {}, {}, virtual_streams=virtual_streams)

  

          mmds = module_build_service.utils.mse._get_base_module_mmds(mmd)

          if virtual_streams == ["f29"]:

file modified
+34 -18
@@ -33,6 +33,7 @@ 

  import hashlib

  import pytest

  import re

+ import sqlalchemy

  

  from tests import app, init_data, clean_database, reuse_component_init_data

  from tests import read_staged_data
@@ -377,11 +378,18 @@ 

          init_data(2, contexts=True)

          rv = self.client.get("/module-build-service/1/module-builds/?context=3a4057d2")

          items = json.loads(rv.data)["items"]

+ 

+         checking_build_id = 3

+         # Get component build ids dynamically rather than hardcode inside expected output.

+         component_builds = sorted(

+             cb.id for cb in ModuleBuild.query.get(checking_build_id).component_builds

+         )

+ 

          expected = [

              {

-                 "component_builds": [3, 4],

+                 "component_builds": component_builds,

                  "context": "3a4057d2",

-                 "id": 3,

+                 "id": checking_build_id,

                  "koji_tag": "module-nginx-1.2",

                  "name": "nginx",

                  "owner": "Moe Szyslak",
@@ -420,6 +428,9 @@ 

                  "buildrequires": {},

              }

          ]

+ 

+         # To avoid different order of component builds impact the subsequent assertion.

+         items[0]['component_builds'] = sorted(items[0]['component_builds'])

          assert items == expected

  

      def test_query_builds_with_id_error(self):
@@ -753,13 +764,22 @@ 

          platform_f28.version = "150"

          db.session.add(platform_f28)

          db.session.commit()

+         # Simply assert the order of all module builds

+         page_size = ModuleBuild.query.count()

          rv = self.client.get(

              "/module-build-service/1/module-builds/?order_desc_by=stream_version"

-             "&order_desc_by=version"

+             "&order_desc_by=version&per_page={}".format(page_size)

          )

          items = json.loads(rv.data)["items"]

-         expected_ids = [8, 6, 4, 1, 2, 12, 3, 5, 7, 9]

          actual_ids = [item["id"] for item in items]

+ 

+         expected_ids = [

+             build.id for build in ModuleBuild.query.order_by(

+                 ModuleBuild.stream_version.desc(),

+                 sqlalchemy.cast(ModuleBuild.version, sqlalchemy.BigInteger).desc()

+             ).all()

+         ]

+ 

          assert actual_ids == expected_ids

  

      def test_query_builds_order_desc_by(self):
@@ -1990,22 +2010,18 @@ 

      def test_buildrequires_is_included_in_json_output(self):

          # Inject xmd/mbs/buildrequires into an existing module build for

          # assertion later.

-         from module_build_service.models import make_session

-         from module_build_service import conf

- 

          br_modulea = dict(stream="6", version="1", context="1234")

          br_moduleb = dict(stream="10", version="1", context="5678")

-         with make_session(conf) as session:

-             build = ModuleBuild.query.first()

-             mmd = build.mmd()

-             xmd = mmd.get_xmd()

-             mbs = xmd.setdefault("mbs", {})

-             buildrequires = mbs.setdefault("buildrequires", {})

-             buildrequires["modulea"] = br_modulea

-             buildrequires["moduleb"] = br_moduleb

-             mmd.set_xmd(xmd)

-             build.modulemd = mmd_to_str(mmd)

-             session.commit()

+         build = ModuleBuild.query.first()

+         mmd = build.mmd()

+         xmd = mmd.get_xmd()

+         mbs = xmd.setdefault("mbs", {})

+         buildrequires = mbs.setdefault("buildrequires", {})

+         buildrequires["modulea"] = br_modulea

+         buildrequires["moduleb"] = br_moduleb

+         mmd.set_xmd(xmd)

+         build.modulemd = mmd_to_str(mmd)

+         db.session.commit()

  

          rv = self.client.get("/module-build-service/1/module-builds/{}".format(build.id))

          data = json.loads(rv.data)

Most of the issues are caused by the use of SQLAlchemy database session. Some
inline comments describe the issues in detail.

Signed-off-by: Chenxiong Qi cqi@redhat.com

Test test_submit_build_repo_regen_not_started_batch fails when run with PostgreSQL and reports the following error:

  File "/src/module_build_service/scheduler/consumer.py", line 296, in get_global_consumer
    raise ValueError("No MBSConsumer found among %r." % len(hub.consumers))
ValueError: No MBSConsumer found among 0.

It works well with SQLite, however.

This error is quite confusing. After some investigation, I still can't find the root cause. @mprahl any thoughts on this error?

Instead of repeating this in a bunch of classes, I'd like us to use a Pytest fixture instead.
https://docs.pytest.org/en/latest/fixture.html

Setting scope="function" should do the trick.

Test test_submit_build_repo_regen_not_started_batch fails when run with PostgreSQL and reports the following error:
File "/src/module_build_service/scheduler/consumer.py", line 296, in get_global_consumer
raise ValueError("No MBSConsumer found among %r." % len(hub.consumers))
ValueError: No MBSConsumer found among 0.

It works well with SQLite, however.
This error is quite confusing. After some investigation, I still can't find the root cause. @mprahl any thoughts on this error?

The issue is that the mocked at_concurrent_component_threshold (_at_concurrent_component_threshold) was still seeing the module build as being on batch 1 when using self.db_session to perform the query. This is because the code in start_next_batch_build increased the batch but didn't commit that back to the database yet. Using the passed-in session worked for me.

Here is the diff:

diff --git a/tests/test_build/test_build.py b/tests/test_build/test_build.py
index e1a629d..d690c08 100644
--- a/tests/test_build/test_build.py
+++ b/tests/test_build/test_build.py
@@ -1634,7 +1634,7 @@ class TestBuild(BaseTestBuild):
         ) as mock_acct:
             # Once we get to batch 2, then simulate the concurrent threshold being met
             def _at_concurrent_component_threshold(config, session):
-                return self.db_session.query(models.ModuleBuild).get(module_build_id).batch == 2
+                return session.query(models.ModuleBuild).get(module_build_id).batch == 2

             mock_acct.side_effect = _at_concurrent_component_threshold
             self.run_scheduler(stop_condition=_stop_condition)

rebased onto e49f69f

4 years ago

@mprahl Thanks for your comments. This PR is updated and all your comments should be addressed.

Why is this necessary?

I was confused by the failure of this test. After debugging, I found that the original code only asserts the module build ids in the first page, which defaults to 10 items. To avoid that happening again for other developers, I made this change to simply assert the order of all module builds, which also makes it a little more convenient to write code that collects the expected module build ids.

I added a comment to describe this line.

:thumbsup: nice job! I'll go ahead and merge this, could you file a separate PR to reenable running the unit tests with Postgres on the Jenkins job?

Commit f3fecf5 fixes this pull-request

Pull-Request has been merged by mprahl

4 years ago

Pull-Request has been merged by mprahl

4 years ago