#1319 Reuse methods read_staged_data and ModuleBuild.get_by_id
Merged 4 years ago by mprahl. Opened 4 years ago by cqi.
cqi/fm-orchestrator reuse into master
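
The tests previously rebuilt staged_data paths and loaded module YAML by hand, and looked up
ModuleBuild rows with SQLAlchemy's query.get(). This change reuses the shared read_staged_data
helper (plus a new staged_data_filename helper) and ModuleBuild.get_by_id instead. A minimal,
illustrative sketch of the pattern, assuming the db_session pytest fixture these tests already use:

    from module_build_service import models
    from module_build_service.utils import load_mmd
    from tests import read_staged_data, staged_data_filename

    # Load staged module metadata; ".yaml" is appended when the extension is omitted.
    mmd = load_mmd(read_staged_data("platform"))

    # When only the path is needed (e.g. for copyfile or mock_resultsdir), use the filename helper.
    local_builds_dir = staged_data_filename("local_builds")

    # Fetch a module build by primary key through the model helper instead of query.get().
    platform_br = models.ModuleBuild.get_by_id(db_session, 1)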

file modified
+17 -23
@@ -26,7 +26,7 @@ 

  import time

  import hashlib

  from traceback import extract_stack

- from module_build_service.utils import to_text_type, load_mmd_file

+ from module_build_service.utils import to_text_type, load_mmd

  

  import koji

  import module_build_service
@@ -44,16 +44,22 @@ 

  conf = init_config(app)

  

  

+ def staged_data_filename(filename):

+     return os.path.join(base_dir, "staged_data", filename)

+ 

+ 

  def read_staged_data(yaml_name):

      """Read module YAML content from staged_data directory

  

-     :param str yaml_name: name of YAML file without extension ``.yaml``.

+     :param str yaml_name: name of the YAML file, with or without the

+         ``.yaml`` extension. ``.yaml`` is appended if the extension is omitted.

      :return: module YAML file's content.

      :rtype: str

      :raises ValueError: if specified module YAML file does not exist in

          staged_data directory.

      """

-     filename = os.path.join(base_dir, "staged_data", "{}.yaml".format(yaml_name))

+     filename = staged_data_filename(

+         yaml_name if '.' in yaml_name else "{}.yaml".format(yaml_name))

      if not os.path.exists(filename):

          raise ValueError("Staged data {}.yaml does not exist.".format(yaml_name))

      with open(filename, "r") as mmd:
@@ -114,7 +120,7 @@ 

          db.session.commit()

  

      if add_platform_module:

-         mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml"))

+         mmd = load_mmd(read_staged_data("platform"))

          import_mmd(db.session, mmd)

  

  
@@ -133,7 +139,7 @@ 

      if multiple_stream_versions:

          if multiple_stream_versions is True:

              multiple_stream_versions = ["f28.0.0", "f29.0.0", "f29.1.0", "f29.2.0"]

-         mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml"))

+         mmd = load_mmd(read_staged_data("platform"))

          for stream in multiple_stream_versions:

              mmd = mmd.copy("platform", stream)

  
@@ -334,10 +340,7 @@ 

      """

      clean_database()

  

-     current_dir = os.path.dirname(__file__)

-     formatted_testmodule_yml_path = os.path.join(

-         current_dir, "staged_data", "formatted_testmodule.yaml")

-     mmd = load_mmd_file(formatted_testmodule_yml_path)

+     mmd = load_mmd(read_staged_data("formatted_testmodule"))

      mmd.get_rpm_component("tangerine").set_buildorder(0)

  

      module_build = module_build_service.models.ModuleBuild(
@@ -365,7 +368,7 @@ 

      db_session.add(module_build)

      db_session.commit()

  

-     platform_br = db_session.query(module_build_service.models.ModuleBuild).get(1)

+     platform_br = module_build_service.models.ModuleBuild.get_by_id(db_session, 1)

      module_build.buildrequires.append(platform_br)

  

      arch = db_session.query(module_build_service.models.ModuleArch).get(1)
@@ -440,10 +443,7 @@ 

  def reuse_component_init_data():

      clean_database()

  

-     current_dir = os.path.dirname(__file__)

-     formatted_testmodule_yml_path = os.path.join(

-         current_dir, "staged_data", "formatted_testmodule.yaml")

-     mmd = load_mmd_file(formatted_testmodule_yml_path)

+     mmd = load_mmd(read_staged_data("formatted_testmodule"))

  

      build_one = module_build_service.models.ModuleBuild(

          name="testmodule",
@@ -477,7 +477,7 @@ 

      db.session.commit()

      db.session.refresh(build_one)

  

-     platform_br = module_build_service.models.ModuleBuild.query.get(1)

+     platform_br = module_build_service.models.ModuleBuild.get_by_id(db.session, 1)

      build_one.buildrequires.append(platform_br)

  

      arch = module_build_service.models.ModuleArch.query.get(1)
@@ -635,10 +635,7 @@ 

      with make_session(conf) as session:

          # Create shared-userspace-570, state is COMPLETE, all components

          # are properly built.

-         current_dir = os.path.dirname(__file__)

-         formatted_testmodule_yml_path = os.path.join(

-             current_dir, "staged_data", "shared-userspace-570.yaml")

-         mmd = load_mmd_file(formatted_testmodule_yml_path)

+         mmd = load_mmd(read_staged_data("shared-userspace-570"))

  

          module_build = module_build_service.models.ModuleBuild(

              name=mmd.get_module_name(),
@@ -691,10 +688,7 @@ 

          session.commit()

  

          # Create shared-userspace-577, state is WAIT, no component built

-         formatted_testmodule_yml_path = os.path.join(

-             current_dir, "staged_data", "shared-userspace-577.yaml"

-         )

-         mmd2 = load_mmd_file(formatted_testmodule_yml_path)

+         mmd2 = load_mmd(read_staged_data("shared-userspace-577"))

  

          module_build = module_build_service.models.ModuleBuild(

              name=mmd2.get_module_name(),

file modified
+5 -5
@@ -24,19 +24,19 @@ 

  

  from module_build_service import conf

  from module_build_service.models import make_session

- from module_build_service.utils.general import load_mmd_file, mmd_to_str

- 

+ from module_build_service.utils.general import mmd_to_str, load_mmd

+ from tests import read_staged_data

  

  BASE_DIR = os.path.dirname(__file__)

  STAGED_DATA_DIR = os.path.join(BASE_DIR, "staged_data")

  

- _mmd = load_mmd_file(os.path.join(STAGED_DATA_DIR, "platform.yaml"))

+ _mmd = load_mmd(read_staged_data("platform"))

  PLATFORM_MODULEMD = mmd_to_str(_mmd)

  

- _mmd2 = load_mmd_file(os.path.join(STAGED_DATA_DIR, "formatted_testmodule.yaml"))

+ _mmd2 = load_mmd(read_staged_data("formatted_testmodule"))

  TESTMODULE_MODULEMD = mmd_to_str(_mmd2)

  

- _mmd3 = load_mmd_file(os.path.join(STAGED_DATA_DIR, "formatted_testmodule.yaml"))

+ _mmd3 = load_mmd(read_staged_data("formatted_testmodule"))

  _mmd3.set_context("c2c572ed")

  TESTMODULE_MODULEMD_SECOND_CONTEXT = mmd_to_str(_mmd3)

  

file modified
+15 -26
@@ -29,9 +29,9 @@ 

  from datetime import datetime, timedelta

  from random import randint

  import hashlib

- from module_build_service.utils import to_text_type

  

  import module_build_service.messaging

+ import module_build_service.scheduler.consumer

  import module_build_service.scheduler.handlers.repos

  import module_build_service.utils

  from module_build_service.errors import Forbidden
@@ -43,14 +43,15 @@ 

  import kobo

  import pytest

  

- from tests import app, reuse_component_init_data, clean_database

  import json

  import itertools

  

  from module_build_service.builder.base import GenericBuilder

  from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

- import module_build_service.scheduler.consumer

  from module_build_service.messaging import MBSModule

+ from tests import (

+     app, reuse_component_init_data, clean_database, read_staged_data, staged_data_filename

+ )

  

  base_dir = dirname(dirname(__file__))

  
@@ -81,9 +82,7 @@ 

      def checkout(self, temp_dir):

          self.sourcedir = path.join(temp_dir, self.name)

          mkdir(self.sourcedir)

-         base_dir = path.abspath(path.dirname(__file__))

-         copyfile(

-             path.join(base_dir, "..", "staged_data", self.mmd_filename), self.get_module_yaml())

+         copyfile(staged_data_filename(self.mmd_filename), self.get_module_yaml())

  

          return self.sourcedir

  
@@ -578,9 +577,8 @@ 

      ):

          FakeSCM(mocked_scm, "testmodule", "testmodule.yaml")

  

-         testmodule = os.path.join(base_dir, "staged_data", "testmodule.yaml")

-         with open(testmodule) as f:

-             yaml = to_text_type(f.read())

+         testmodule_filename = staged_data_filename("testmodule.yaml")

+         yaml = read_staged_data("testmodule")

  

          with patch.object(

              module_build_service.config.Config,
@@ -591,7 +589,7 @@ 

              rv = self.client.post(

                  "/module-build-service/1/module-builds/",

                  content_type="multipart/form-data",

-                 data={"yaml": (testmodule, yaml)},

+                 data={"yaml": (testmodule_filename, yaml)},

              )

              data = json.loads(rv.data)

              assert data["status"] == 403
@@ -604,7 +602,6 @@ 

      ):

          FakeSCM(

              mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")

-         testmodule = os.path.join(base_dir, "staged_data", "testmodule.yaml")

  

          with patch.object(

              module_build_service.config.Config,
@@ -612,7 +609,7 @@ 

              new_callable=PropertyMock,

              return_value=True,

          ):

-             with open(testmodule, "rb") as f:

+             with open(staged_data_filename("testmodule.yaml"), "rb") as f:

                  yaml_file = FileStorage(f)

                  rv = self.client.post(

                      "/module-build-service/1/module-builds/",
@@ -1123,11 +1120,7 @@ 

          build_one.runtime_context = "9c690d0e"

          build_one.context = "9c690d0e"

          build_one.state = models.BUILD_STATES["failed"]

-         current_dir = os.path.dirname(__file__)

-         formatted_testmodule_yml_path = os.path.join(

-             current_dir, "..", "staged_data", "formatted_testmodule.yaml")

-         with open(formatted_testmodule_yml_path, "r") as f:

-             build_one.modulemd = to_text_type(f.read())

+         build_one.modulemd = read_staged_data("formatted_testmodule")

          build_one.koji_tag = "module-testmodule-master-20180205135154-9c690d0e"

          build_one.scmurl = "https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453"

          build_one.batch = 2
@@ -1266,11 +1259,7 @@ 

          # this is not calculated by real but just a value to

          # match the calculated context from expanded test mmd

          build_one.context = "9c690d0e"

-         current_dir = os.path.dirname(__file__)

-         formatted_testmodule_yml_path = os.path.join(

-             current_dir, "..", "staged_data", "formatted_testmodule.yaml")

-         with open(formatted_testmodule_yml_path, "r") as f:

-             build_one.modulemd = to_text_type(f.read())

+         build_one.modulemd = read_staged_data("formatted_testmodule")

          build_one.koji_tag = "module-testmodule-master-20180205135154-6ef9a711"

          build_one.scmurl = "https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453"

          build_one.batch = 2
@@ -1661,7 +1650,7 @@ 

  

          # Simulate a random repo regen message that MBS didn't expect

          cleanup_moksha()

-         module = db_session.query(models.ModuleBuild).get(module_build_id)

+         module = models.ModuleBuild.get_by_id(db_session, module_build_id)

          msgs = [

              module_build_service.messaging.KojiRepoChange(

                  msg_id="a faked internal message", repo_tag=module.koji_tag + "-build"
@@ -1684,8 +1673,8 @@ 

          Test that when a build is submitted with a buildrequire without a Koji tag,

          MBS doesn't supply it as a dependency to the builder.

          """

-         metadata_mmd = module_build_service.utils.load_mmd_file(

-             path.join(base_dir, "staged_data", "build_metadata_module.yaml")

+         metadata_mmd = module_build_service.utils.load_mmd(

+             read_staged_data("build_metadata_module")

          )

          module_build_service.utils.import_mmd(db.session, metadata_mmd)

  
@@ -1744,7 +1733,7 @@ 

      @patch(

          "module_build_service.config.Config.mock_resultsdir",

          new_callable=PropertyMock,

-         return_value=path.join(base_dir, "staged_data", "local_builds"),

+         return_value=staged_data_filename('local_builds'),

      )

      def test_submit_build_local_dependency(

          self, resultsdir, mocked_scm, mocked_get_user, conf_system, hmsc, db_session

@@ -136,7 +136,7 @@ 

              "/module-base-runtime-0.25-9/latest/x86_64"

          )

  

-     def test_recover_orphaned_artifact_when_tagged(self):

+     def test_recover_orphaned_artifact_when_tagged(self, db_session):

          """ Test recover_orphaned_artifact when the artifact is found and tagged in both tags

          """

          builder = FakeKojiModuleBuilder(
@@ -154,7 +154,7 @@ 

          build_tagged = [{"nvr": "foo-1.0-1.module+e0095747", "task_id": 12345, "build_id": 91}]

          dest_tagged = [{"nvr": "foo-1.0-1.module+e0095747", "task_id": 12345, "build_id": 91}]

          builder.koji_session.listTagged.side_effect = [build_tagged, dest_tagged]

-         module_build = module_build_service.models.ModuleBuild.query.get(4)

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 4)

          component_build = module_build.component_builds[0]

          component_build.task_id = None

          component_build.state = None
@@ -181,7 +181,7 @@ 

          assert component_build.state_reason == "Found existing build"

          assert builder.koji_session.tagBuild.call_count == 0

  

-     def test_recover_orphaned_artifact_when_untagged(self):

+     def test_recover_orphaned_artifact_when_untagged(self, db_session):

          """ Tests recover_orphaned_artifact when the build is found but untagged

          """

          builder = FakeKojiModuleBuilder(
@@ -203,7 +203,7 @@ 

          builder.koji_session.untaggedBuilds.return_value = untagged

          build_info = {"nvr": "foo-1.0-1.{0}".format(dist_tag), "task_id": 12345, "build_id": 91}

          builder.koji_session.getBuild.return_value = build_info

-         module_build = module_build_service.models.ModuleBuild.query.get(4)

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 4)

          component_build = module_build.component_builds[0]

          component_build.task_id = None

          component_build.nvr = None
@@ -224,7 +224,7 @@ 

          assert component_build.state_reason == "Found existing build"

          builder.koji_session.tagBuild.assert_called_once_with(2, "foo-1.0-1.{0}".format(dist_tag))

  

-     def test_recover_orphaned_artifact_when_nothing_exists(self):

+     def test_recover_orphaned_artifact_when_nothing_exists(self, db_session):

          """ Test recover_orphaned_artifact when the build is not found

          """

          builder = FakeKojiModuleBuilder(
@@ -243,7 +243,7 @@ 

          builder.koji_session.listTagged.return_value = tagged

          untagged = [{"nvr": "foo-1.0-1.nope", "release": "nope"}]

          builder.koji_session.untaggedBuilds.return_value = untagged

-         module_build = module_build_service.models.ModuleBuild.query.get(4)

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 4)

          component_build = module_build.component_builds[0]

          component_build.task_id = None

          component_build.nvr = None
@@ -729,7 +729,9 @@ 

          ),

      )

      @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")

-     def test_get_filtered_rpms_on_self_dep(self, ClientSession, br_filtered_rpms, expected):

+     def test_get_filtered_rpms_on_self_dep(

+         self, ClientSession, br_filtered_rpms, expected, db_session

+     ):

          session = ClientSession.return_value

          session.listTaggedRPMS.return_value = (

              [
@@ -774,7 +776,7 @@ 

              ],

          )

          reuse_component_init_data()

-         current_module = module_build_service.models.ModuleBuild.query.get(3)

+         current_module = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)

          rv = KojiModuleBuilder._get_filtered_rpms_on_self_dep(current_module, br_filtered_rpms)

          assert set(rv) == set(expected)

          session.assert_not_called()

@@ -10,8 +10,8 @@ 

  from module_build_service import conf

  from module_build_service.models import ModuleBuild, ComponentBuild, make_session

  from module_build_service.builder.MockModuleBuilder import MockModuleBuilder

- from module_build_service.utils import import_fake_base_module, load_mmd_file, mmd_to_str

- from tests import clean_database, make_module

+ from module_build_service.utils import import_fake_base_module, mmd_to_str, load_mmd

+ from tests import clean_database, make_module, read_staged_data

  

  

  class TestMockModuleBuilder:
@@ -24,9 +24,7 @@ 

          shutil.rmtree(self.resultdir)

  

      def _create_module_with_filters(self, session, batch, state):

-         base_dir = os.path.abspath(os.path.dirname(__file__))

-         mmd = load_mmd_file(

-             os.path.join(base_dir, "..", "staged_data", "testmodule-with-filters.yaml"))

+         mmd = load_mmd(read_staged_data("testmodule-with-filters"))

          # Set the name and stream

          mmd = mmd.copy("mbs-testmodule", "test")

          mmd.set_xmd({

@@ -20,14 +20,13 @@ 

  #

  # Written by Ralph Bean <rbean@redhat.com>

  

- import os

  import pytest

  

  from mock import patch

  from module_build_service import conf

  from module_build_service.models import ComponentBuild, ModuleBuild, make_session

- from module_build_service.utils.general import load_mmd_file, mmd_to_str

- from tests import init_data as init_data_contexts, clean_database, make_module

+ from module_build_service.utils.general import mmd_to_str, load_mmd

+ from tests import init_data as init_data_contexts, clean_database, make_module, read_staged_data

  from tests.test_models import init_data, module_build_from_modulemd

  

  
@@ -66,10 +65,7 @@ 

          """ Test that the build_context, runtime_context, and context hashes are correctly

          determined"""

          build = ModuleBuild.query.filter_by(id=1).one()

-         yaml_path = os.path.join(

-             os.path.dirname(__file__), "..", "staged_data", "testmodule_dependencies.yaml")

-         mmd = load_mmd_file(yaml_path)

-         build.modulemd = mmd_to_str(mmd)

+         build.modulemd = read_staged_data("testmodule_dependencies")

          (

              build.ref_build_context,

              build.build_context,
@@ -81,22 +77,19 @@ 

          assert build.runtime_context == "bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c"

          assert build.context == "3ee22b28"

  

-     def test_siblings_property(self):

+     def test_siblings_property(self, db_session):

          """ Tests that the siblings property returns the ID of all modules with

          the same name:stream:version

          """

          clean_database()

-         yaml_path = os.path.join(

-             os.path.dirname(__file__), "..", "staged_data", "formatted_testmodule.yaml")

-         mmd = load_mmd_file(yaml_path)

-         with make_session(conf) as session:

-             for i in range(3):

-                 build = module_build_from_modulemd(mmd_to_str(mmd))

-                 build.build_context = "f6e2aeec7576196241b9afa0b6b22acf2b6873d" + str(i)

-                 build.runtime_context = "bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c" + str(i)

-                 session.add(build)

-         session.commit()

-         build_one = ModuleBuild.query.get(2)

+         mmd = load_mmd(read_staged_data("formatted_testmodule"))

+         for i in range(3):

+             build = module_build_from_modulemd(mmd_to_str(mmd))

+             build.build_context = "f6e2aeec7576196241b9afa0b6b22acf2b6873d" + str(i)

+             build.runtime_context = "bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c" + str(i)

+             db_session.add(build)

+         db_session.commit()

+         build_one = ModuleBuild.get_by_id(db_session, 2)

          assert build_one.siblings == [3, 4]

  

      @pytest.mark.parametrize(

file modified
+23 -26
@@ -28,7 +28,7 @@ 

  

  import module_build_service.resolver as mbs_resolver

  from module_build_service import app, conf, db, models, utils, Modulemd

- from module_build_service.utils import import_mmd, load_mmd_file, mmd_to_str

+ from module_build_service.utils import import_mmd, mmd_to_str, load_mmd

  from module_build_service.models import ModuleBuild

  import tests

  
@@ -41,7 +41,7 @@ 

          tests.reuse_component_init_data()

  

      def test_get_buildrequired_modulemds(self):

-         mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml"))

+         mmd = load_mmd(tests.read_staged_data("platform"))

          mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")

          with models.make_session(conf) as db_session:

              import_mmd(db_session, mmd)
@@ -90,15 +90,15 @@ 

              assert nsvcs == set(["platform:f29.1.0:3:00000000"])

  

      @pytest.mark.parametrize("empty_buildrequires", [False, True])

-     def test_get_module_build_dependencies(self, empty_buildrequires):

+     def test_get_module_build_dependencies(self, empty_buildrequires, db_session):

          """

          Tests that the buildrequires of testmodule are returned

          """

          expected = set(["module-f28-build"])

-         module = models.ModuleBuild.query.get(2)

+         module = models.ModuleBuild.get_by_id(db_session, 2)

          if empty_buildrequires:

              expected = set()

-             module = models.ModuleBuild.query.get(2)

+             module = models.ModuleBuild.get_by_id(db_session, 2)

              mmd = module.mmd()

              # Wipe out the dependencies

              mmd.clear_dependencies()
@@ -106,19 +106,18 @@ 

              xmd["mbs"]["buildrequires"] = {}

              mmd.set_xmd(xmd)

              module.modulemd = mmd_to_str(mmd)

-             db.session.add(module)

-             db.session.commit()

+             db_session.commit()

          resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

          result = resolver.get_module_build_dependencies(

              "testmodule", "master", "20170109091357", "78e4a6fd").keys()

          assert set(result) == expected

  

-     def test_get_module_build_dependencies_recursive(self):

+     def test_get_module_build_dependencies_recursive(self, db_session):

          """

          Tests that the buildrequires are returned when it is two layers deep

          """

          # Add testmodule2 that requires testmodule

-         module = models.ModuleBuild.query.get(3)

+         module = models.ModuleBuild.get_by_id(db_session, 3)

          mmd = module.mmd()

          # Rename the module

          mmd = mmd.copy("testmodule2")
@@ -139,8 +138,7 @@ 

          module.version = str(mmd.get_version())

          module.koji_tag = "module-ae2adf69caf0e1b6"

  

-         db.session.add(module)

-         db.session.commit()

+         db_session.commit()

  

          resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

          result = resolver.get_module_build_dependencies(
@@ -153,7 +151,7 @@ 

      @patch(

          "module_build_service.config.Config.mock_resultsdir",

          new_callable=PropertyMock,

-         return_value=os.path.join(base_dir, "staged_data", "local_builds"),

+         return_value=tests.staged_data_filename("local_builds"),

      )

      def test_get_module_build_dependencies_recursive_requires(self, resultdir, conf_system):

          """
@@ -166,13 +164,13 @@ 

              resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

              result = resolver.get_module_build_dependencies(mmd=build[0].mmd()).keys()

  

-             local_path = os.path.join(base_dir, "staged_data", "local_builds")

+             local_path = tests.staged_data_filename("local_builds")

  

              expected = [os.path.join(local_path, "module-parent-master-20170816080815/results")]

              assert set(result) == set(expected)

  

-     def test_resolve_requires(self):

-         build = models.ModuleBuild.query.get(2)

+     def test_resolve_requires(self, db_session):

+         build = models.ModuleBuild.get_by_id(db_session, 2)

          resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

          result = resolver.resolve_requires(

              [":".join([build.name, build.stream, build.version, build.context])]
@@ -188,11 +186,11 @@ 

              }

          }

  

-     def test_resolve_profiles(self):

+     def test_resolve_profiles(self, db_session):

          """

          Tests that the profiles get resolved recursively

          """

-         mmd = models.ModuleBuild.query.get(2).mmd()

+         mmd = models.ModuleBuild.get_by_id(db_session, 2).mmd()

          resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")

          result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))

          expected = {
@@ -240,19 +238,18 @@ 

      @patch(

          "module_build_service.config.Config.mock_resultsdir",

          new_callable=PropertyMock,

-         return_value=os.path.join(base_dir, "staged_data", "local_builds"),

+         return_value=tests.staged_data_filename("local_builds")

      )

-     def test_resolve_profiles_local_module(self, local_builds, conf_system):

+     def test_resolve_profiles_local_module(self, local_builds, conf_system, db_session):

          """

          Test that profiles get resolved recursively on local builds

          """

-         with app.app_context():

-             utils.load_local_builds(["platform"])

-             mmd = models.ModuleBuild.query.get(2).mmd()

-             resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

-             result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))

-             expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])}

-             assert result == expected

+         utils.load_local_builds(["platform"])

+         mmd = models.ModuleBuild.get_by_id(db_session, 2).mmd()

+         resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

+         result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))

+         expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])}

+         assert result == expected

  

      def test_get_latest_with_virtual_stream(self):

          tests.init_data(1, multiple_stream_versions=True)

@@ -25,7 +25,7 @@ 

  

  import module_build_service.resolver as mbs_resolver

  from module_build_service import db

- from module_build_service.utils.general import import_mmd, load_mmd_file, mmd_to_str

+ from module_build_service.utils.general import import_mmd, mmd_to_str, load_mmd

  from module_build_service.models import ModuleBuild

  import tests

  
@@ -38,7 +38,7 @@ 

          tests.reuse_component_init_data()

  

      def test_get_buildrequired_modulemds(self):

-         mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml"))

+         mmd = load_mmd(tests.read_staged_data("platform"))

          mmd = mmd.copy(mmd.get_module_name(), "f8")

          import_mmd(db.session, mmd)

          platform_f8 = ModuleBuild.query.filter_by(stream="f8").one()

@@ -348,7 +348,7 @@ 

      @patch(

          "module_build_service.config.Config.mock_resultsdir",

          new_callable=PropertyMock,

-         return_value=os.path.join(base_dir, "staged_data", "local_builds"),

+         return_value=tests.staged_data_filename("local_builds")

      )

      def test_resolve_profiles_local_module(

          self, local_builds, conf_system, formatted_testmodule_mmd

@@ -23,21 +23,20 @@ 

  

  from mock import patch, PropertyMock

  

- from tests import conf, clean_database

+ from tests import conf, clean_database, read_staged_data

  from tests.test_views.test_views import FakeSCM

  import module_build_service.messaging

  import module_build_service.scheduler.handlers.modules

- from module_build_service import build_logs, db

+ from module_build_service import build_logs

  from module_build_service.models import make_session, ModuleBuild, ComponentBuild

- from module_build_service.utils.general import mmd_to_str, load_mmd, load_mmd_file

+ from module_build_service.utils.general import mmd_to_str, load_mmd

  

  

  class TestModuleInit:

      def setup_method(self, test_method):

          self.fn = module_build_service.scheduler.handlers.modules.init

-         self.staged_data_dir = os.path.join(os.path.dirname(__file__), "../", "staged_data")

-         testmodule_yml_path = os.path.join(self.staged_data_dir, "testmodule_init.yaml")

-         mmd = load_mmd_file(testmodule_yml_path)

+         testmodule_yml_path = read_staged_data("testmodule_init")

+         mmd = load_mmd(testmodule_yml_path)

          # Set the name and stream

          mmd = mmd.copy("testmodule", "1")

          scmurl = "git://pkgs.domain.local/modules/testmodule?#620ec77"
@@ -60,7 +59,7 @@ 

      @patch("module_build_service.scm.SCM")

      @patch("module_build_service.scheduler.handlers.modules.handle_stream_collision_modules")

      @patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])

-     def test_init_basic(self, get_build_arches, rscm, mocked_scm, built_rpms):

+     def init_basic(self, db_session, get_build_arches, rscm, mocked_scm, built_rpms):

          FakeSCM(

              mocked_scm,

              "testmodule",
@@ -77,22 +76,23 @@ 

              "x-debuginfo-0:2.5.48-3.el8+1308+551bfa71",

          ]

  

-         platform_build = ModuleBuild.query.get(1)

+         platform_build = ModuleBuild.get_by_id(db_session, 1)

          mmd = platform_build.mmd()

          for rpm in mmd.get_rpm_filters():

              mmd.remove_rpm_filter(rpm)

          mmd.add_rpm_filter("foo")

          mmd.add_rpm_filter("bar")

+ 

          platform_build.modulemd = mmd_to_str(mmd)

-         db.session.commit()

+         db_session.commit()

  

          msg = module_build_service.messaging.MBSModule(

              msg_id=None, module_build_id=2, module_build_state="init"

          )

  

-         with make_session(conf) as session:

-             self.fn(config=conf, session=session, msg=msg)

-         build = ModuleBuild.query.filter_by(id=2).one()

+         self.fn(config=conf, session=db_session, msg=msg)

+ 

+         build = ModuleBuild.get_by_id(db_session, 2)

          # Make sure the module entered the wait state

          assert build.state == 1, build.state

          # Make sure format_mmd was run properly
@@ -103,15 +103,15 @@ 

          ]

          return build

  

-     def test_init_called_twice(self):

-         build = self.test_init_basic()

+     def test_init_called_twice(self, db_session):

+         build = self.init_basic(db_session)

          old_component_builds = len(build.component_builds)

          old_mmd = load_mmd(build.modulemd)

  

          build.state = 4

-         db.session.commit()

-         build = self.test_init_basic()

-         db.session.refresh(build)

+         db_session.commit()

+         build = self.init_basic(db_session)

+         db_session.refresh(build)

  

          assert build.state == 1

          assert old_component_builds == len(build.component_builds)
@@ -146,8 +146,8 @@ 

      @patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])

      def test_init_includedmodule(self, get_build_arches, mocked_scm, mocked_mod_allow_repo):

          FakeSCM(mocked_scm, "includedmodules", ["testmodule_init.yaml"])

-         includedmodules_yml_path = os.path.join(self.staged_data_dir, "includedmodules.yaml")

-         mmd = load_mmd_file(includedmodules_yml_path)

+         includedmodules_yml_path = read_staged_data("includedmodules")

+         mmd = load_mmd(includedmodules_yml_path)

          # Set the name and stream

          mmd = mmd.copy("includedmodules", "1")

          scmurl = "git://pkgs.domain.local/modules/includedmodule?#da95886"

@@ -27,10 +27,10 @@ 

  import os

  import koji

  import pytest

- from tests import conf, db, scheduler_init_data

+ from tests import conf, db, scheduler_init_data, read_staged_data

  import module_build_service.resolver

  from module_build_service import build_logs, Modulemd

- from module_build_service.utils.general import load_mmd_file

+ from module_build_service.utils.general import load_mmd

  from module_build_service.models import ComponentBuild, ModuleBuild

  

  base_dir = os.path.dirname(os.path.dirname(__file__))
@@ -71,12 +71,8 @@ 

              "state": "some state",

              "id": 1,

          }

- 

-         formatted_testmodule_yml_path = os.path.join(

-             base_dir, "staged_data", "formatted_testmodule.yaml")

-         mmd = load_mmd_file(formatted_testmodule_yml_path)

          mocked_module_build.id = 1

-         mocked_module_build.mmd.return_value = mmd

+         mocked_module_build.mmd.return_value = load_mmd(read_staged_data("formatted_testmodule"))

          mocked_module_build.component_builds = []

  

          from_module_event.return_value = mocked_module_build

@@ -442,28 +442,29 @@ 

          # Ensure we did *not* process any of the non-waiting builds.

          assert consumer.incoming.qsize() == 0

  

-     def test_cleanup_stale_failed_builds(self, create_builder, global_consumer, dbg):

+     def test_cleanup_stale_failed_builds(self, create_builder, global_consumer, dbg, db_session):

          """ Test that one of the two module builds gets to the garbage state when running

          cleanup_stale_failed_builds.

          """

          builder = mock.MagicMock()

          create_builder.return_value = builder

-         module_build_one = models.ModuleBuild.query.get(2)

-         module_build_two = models.ModuleBuild.query.get(3)

+ 

+         module_build_one = models.ModuleBuild.get_by_id(db_session, 2)

          module_build_one.state = models.BUILD_STATES["failed"]

          module_build_one.time_modified = datetime.utcnow() - timedelta(

              days=conf.cleanup_failed_builds_time + 1)

+ 

+         module_build_two = models.ModuleBuild.get_by_id(db_session, 3)

          module_build_two.time_modified = datetime.utcnow()

          module_build_two.state = models.BUILD_STATES["failed"]

-         failed_component = models.ComponentBuild.query.filter_by(

+ 

+         failed_component = db_session.query(models.ComponentBuild).filter_by(

              package="tangerine", module_id=3).one()

          failed_component.state = koji.BUILD_STATES["FAILED"]

          failed_component.tagged = False

          failed_component.tagged_in_final = False

-         db.session.add(failed_component)

-         db.session.add(module_build_one)

-         db.session.add(module_build_two)

-         db.session.commit()

+ 

+         db_session.commit()

  

          consumer = mock.MagicMock()

          consumer.incoming = queue.Queue()
@@ -473,8 +474,8 @@ 

  

          # Ensure the queue is empty before we start

          assert consumer.incoming.qsize() == 0

-         poller.cleanup_stale_failed_builds(conf, db.session)

-         db.session.refresh(module_build_two)

+         poller.cleanup_stale_failed_builds(conf, db_session)

+         db_session.refresh(module_build_two)

          # Make sure module_build_one was transitioned to garbage

          assert module_build_one.state == models.BUILD_STATES["garbage"]

          state_reason = (
@@ -496,25 +497,27 @@ 

              "module-build-macros-0.1-1.module+0+d027b723",

          ])

  

-     def test_cleanup_stale_failed_builds_no_components(self, create_builder, global_consumer, dbg):

+     def test_cleanup_stale_failed_builds_no_components(

+         self, create_builder, global_consumer, dbg, db_session

+     ):

          """ Test that a module build without any components built gets to the garbage state when

          running cleanup_stale_failed_builds.

          """

-         module_build_one = models.ModuleBuild.query.get(1)

-         module_build_two = models.ModuleBuild.query.get(2)

+         module_build_one = models.ModuleBuild.get_by_id(db_session, 1)

          module_build_one.state = models.BUILD_STATES["failed"]

          module_build_one.time_modified = datetime.utcnow()

+ 

+         module_build_two = models.ModuleBuild.get_by_id(db_session, 2)

          module_build_two.state = models.BUILD_STATES["failed"]

          module_build_two.time_modified = datetime.utcnow() - timedelta(

              days=conf.cleanup_failed_builds_time + 1)

          module_build_two.koji_tag = None

          module_build_two.cg_build_koji_tag = None

+ 

          for c in module_build_two.component_builds:

              c.state = None

-             db.session.add(c)

-         db.session.add(module_build_one)

-         db.session.add(module_build_two)

-         db.session.commit()

+ 

+         db_session.commit()

  

          consumer = mock.MagicMock()

          consumer.incoming = queue.Queue()
@@ -524,8 +527,8 @@ 

  

          # Ensure the queue is empty before we start

          assert consumer.incoming.qsize() == 0

-         poller.cleanup_stale_failed_builds(conf, db.session)

-         db.session.refresh(module_build_two)

+         poller.cleanup_stale_failed_builds(conf, db_session)

+         db_session.refresh(module_build_two)

          # Make sure module_build_two was transitioned to garbage

          assert module_build_two.state == models.BUILD_STATES["garbage"]

          state_reason = (
@@ -541,24 +544,26 @@ 

      @pytest.mark.parametrize(

          "test_state", [models.BUILD_STATES[state] for state in conf.cleanup_stuck_builds_states]

      )

-     def test_cancel_stuck_module_builds(self, create_builder, global_consumer, dbg, test_state):

+     def test_cancel_stuck_module_builds(

+         self, create_builder, global_consumer, dbg, test_state, db_session

+     ):

  

-         module_build1 = models.ModuleBuild.query.get(1)

+         module_build1 = models.ModuleBuild.get_by_id(db_session, 1)

          module_build1.state = test_state

          under_thresh = conf.cleanup_stuck_builds_time - 1

          module_build1.time_modified = datetime.utcnow() - timedelta(

              days=under_thresh, hours=23, minutes=59)

  

-         module_build2 = models.ModuleBuild.query.get(2)

+         module_build2 = models.ModuleBuild.get_by_id(db_session, 2)

          module_build2.state = test_state

          module_build2.time_modified = datetime.utcnow() - timedelta(

              days=conf.cleanup_stuck_builds_time)

  

-         module_build2 = models.ModuleBuild.query.get(3)

+         module_build2 = models.ModuleBuild.get_by_id(db_session, 3)

          module_build2.state = test_state

          module_build2.time_modified = datetime.utcnow()

  

-         db.session.commit()

+         db_session.commit()

  

          consumer = mock.MagicMock()

          consumer.incoming = queue.Queue()
@@ -568,9 +573,9 @@ 

  

          assert consumer.incoming.qsize() == 0

  

-         poller.cancel_stuck_module_builds(conf, db.session)

+         poller.cancel_stuck_module_builds(conf, db_session)

  

-         module = models.ModuleBuild.query.filter_by(state=4).all()

+         module = db_session.query(models.ModuleBuild).filter_by(state=4).all()

          assert len(module) == 1

          assert module[0].id == 2

  
@@ -634,18 +639,20 @@ 

  

      @pytest.mark.parametrize("greenwave_result", [True, False])

      @patch("module_build_service.utils.greenwave.Greenwave.check_gating")

-     def test_poll_greenwave(self, mock_gw, create_builder, global_consumer, dbg, greenwave_result):

+     def test_poll_greenwave(

+         self, mock_gw, create_builder, global_consumer, dbg, greenwave_result, db_session

+     ):

  

-         module_build1 = models.ModuleBuild.query.get(1)

+         module_build1 = models.ModuleBuild.get_by_id(db_session, 1)

          module_build1.state = models.BUILD_STATES["ready"]

  

-         module_build2 = models.ModuleBuild.query.get(2)

+         module_build2 = models.ModuleBuild.get_by_id(db_session, 2)

          module_build2.state = models.BUILD_STATES["done"]

  

-         module_build2 = models.ModuleBuild.query.get(3)

+         module_build2 = models.ModuleBuild.get_by_id(db_session, 3)

          module_build2.state = models.BUILD_STATES["init"]

  

-         db.session.commit()

+         db_session.commit()

  

          consumer = mock.MagicMock()

          consumer.incoming = queue.Queue()

@@ -122,15 +122,15 @@ 

  

          # Ensure the time_completed is None, so we can test it is set to

          # some date once the build is finalized.

-         module_build = module_build_service.models.ModuleBuild.query.get(2)

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

          module_build.time_completed = None

-         db.session.commit()

+         db_session.commit()

  

          def mocked_finalizer(succeeded=None):

              # Check that the time_completed is set in the time when

              # finalizer is called.

              assert succeeded is True

-             module_build = db_session.query(module_build_service.models.ModuleBuild).get(2)

+             module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

              assert module_build.time_completed is not None

  

          finalizer.side_effect = mocked_finalizer
@@ -207,7 +207,7 @@ 

          mock_log_info.assert_called_with(

              "Ignoring repo regen, because not all components are tagged."

          )

-         module_build = module_build_service.models.ModuleBuild.query.get(2)

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

          # Make sure the module build didn't transition since all the components weren't tagged

          assert module_build.state == module_build_service.models.BUILD_STATES["build"]

  
@@ -242,6 +242,6 @@ 

          msg = module_build_service.messaging.KojiRepoChange(

              "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")

          module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)

-         module_build = module_build_service.models.ModuleBuild.query.get(2)

+         module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)

  

          assert module_build.state == module_build_service.models.BUILD_STATES["failed"]

file modified
+52 -59
@@ -31,6 +31,7 @@ 

  import module_build_service.scm

  from module_build_service import models, conf

  from module_build_service.errors import ProgrammingError, ValidationError, UnprocessableEntity

+ from module_build_service.utils.general import load_mmd

  from tests import (

      reuse_component_init_data,

      db,
@@ -39,7 +40,7 @@ 

      init_data,

      scheduler_init_data,

      make_module,

- )

+     read_staged_data, staged_data_filename)

  import mock

  import koji

  import pytest
@@ -74,9 +75,7 @@ 

      def checkout(self, temp_dir):

          self.sourcedir = path.join(temp_dir, self.name)

          mkdir(self.sourcedir)

-         base_dir = path.abspath(path.dirname(__file__))

-         copyfile(

-             path.join(base_dir, "..", "staged_data", self.mmd_filename), self.get_module_yaml())

+         copyfile(staged_data_filename(self.mmd_filename), self.get_module_yaml())

  

          return self.sourcedir

  
@@ -315,7 +314,7 @@ 

      def test_get_build_arches(self, ClientSession):

          session = ClientSession.return_value

          session.getTag.return_value = {"arches": "ppc64le"}

-         mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml"))

+         mmd = load_mmd(read_staged_data("formatted_testmodule"))

          r = module_build_service.utils.get_build_arches(mmd, conf)

          assert r == ["ppc64le"]

  
@@ -326,7 +325,7 @@ 

          """

          session = ClientSession.return_value

          session.getTag.return_value = {"arches": ""}

-         mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml"))

+         mmd = load_mmd(read_staged_data("formatted_testmodule"))

          r = module_build_service.utils.get_build_arches(mmd, conf)

          assert set(r) == set(conf.arches)

  
@@ -336,7 +335,7 @@ 

          return_value=["testmodule"],

      )

      def test_get_build_arches_koji_tag_arches(self, cfg):

-         mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml"))

+         mmd = load_mmd(read_staged_data("formatted_testmodule"))

          xmd = mmd.get_xmd()

          xmd["mbs"]["koji_tag_arches"] = ["ppc64", "ppc64le"]

          mmd.set_xmd(xmd)
@@ -346,7 +345,7 @@ 

  

      @patch.object(conf, "base_module_arches", new={"platform:xx": ["x86_64", "i686"]})

      def test_get_build_arches_base_module_override(self):

-         mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml"))

+         mmd = load_mmd(read_staged_data("formatted_testmodule"))

          xmd = mmd.get_xmd()

          mbs_options = xmd["mbs"] if "mbs" in xmd.keys() else {}

          mbs_options["buildrequires"] = {"platform": {"stream": "xx"}}
@@ -358,7 +357,7 @@ 

  

      @pytest.mark.parametrize("context", ["c1", None])

      def test_import_mmd_contexts(self, context):

-         mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml"))

+         mmd = load_mmd(read_staged_data("formatted_testmodule"))

          mmd.set_context(context)

  

          xmd = mmd.get_xmd()
@@ -376,7 +375,7 @@ 

              assert build.context == models.DEFAULT_MODULE_CONTEXT

  

      def test_import_mmd_multiple_dependencies(self):

-         mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml"))

+         mmd = load_mmd(read_staged_data("formatted_testmodule"))

          mmd.add_dependencies(mmd.get_dependencies()[0].copy())

  

          expected_error = "The imported module's dependencies list should contain just one element"
@@ -385,7 +384,7 @@ 

              assert str(e.value) == expected_error

  

      def test_import_mmd_no_xmd_buildrequires(self):

-         mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml"))

+         mmd = load_mmd(read_staged_data("formatted_testmodule"))

          xmd = mmd.get_xmd()

          del xmd["mbs"]["buildrequires"]

          mmd.set_xmd(xmd)
@@ -399,7 +398,7 @@ 

              assert str(e.value) == expected_error

  

      def test_import_mmd_minimal_xmd_from_local_repository(self):

-         mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml"))

+         mmd = load_mmd(read_staged_data("formatted_testmodule"))

          xmd = mmd.get_xmd()

          xmd["mbs"] = {}

          xmd["mbs"]["koji_tag"] = "repofile:///etc/yum.repos.d/fedora-modular.repo"
@@ -423,7 +422,7 @@ 

      )

      def test_import_mmd_base_module(self, stream, disttag_marking, error_msg):

          clean_database(add_platform_module=False)

-         mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "platform.yaml"))

+         mmd = load_mmd(read_staged_data("platform"))

          mmd = mmd.copy(mmd.get_module_name(), stream)

  

          if disttag_marking:
@@ -437,18 +436,20 @@ 

          else:

              module_build_service.utils.import_mmd(db.session, mmd)

  

-     def test_get_rpm_release_mse(self):

+     def test_get_rpm_release_mse(self, db_session):

          init_data(contexts=True)

-         build_one = models.ModuleBuild.query.get(2)

-         build_two = models.ModuleBuild.query.get(3)

+ 

+         build_one = models.ModuleBuild.get_by_id(db_session, 2)

          release_one = module_build_service.utils.get_rpm_release(build_one)

-         release_two = module_build_service.utils.get_rpm_release(build_two)

          assert release_one == "module+2+b8645bbb"

+ 

+         build_two = models.ModuleBuild.get_by_id(db_session, 3)

+         release_two = module_build_service.utils.get_rpm_release(build_two)

          assert release_two == "module+2+17e35784"

  

      def test_get_rpm_release_platform_stream(self, db_session):

          scheduler_init_data(db_session, 1)

-         build_one = db_session.query(models.ModuleBuild).get(2)

+         build_one = models.ModuleBuild.get_by_id(db_session, 2)

          release = module_build_service.utils.get_rpm_release(build_one)

          assert release == "module+f28+2+814cfa39"

  
@@ -469,7 +470,7 @@ 

          db_session.add(platform)

          db_session.commit()

  

-         build_one = db_session.query(models.ModuleBuild).get(2)

+         build_one = models.ModuleBuild.get_by_id(db_session, 2)

          release = module_build_service.utils.get_rpm_release(build_one)

          assert release == "module+fedora28+2+814cfa39"

  
@@ -484,14 +485,10 @@ 

          and that module has the xmd.mbs.disttag_marking field set, it should influence the disttag.

          """

          scheduler_init_data(db_session, 1)

-         mmd_path = path.abspath(

-             path.join(

-                 __file__, path.pardir, path.pardir, "staged_data", "build_metadata_module.yaml")

-         )

-         metadata_mmd = module_build_service.utils.load_mmd_file(mmd_path)

+         metadata_mmd = load_mmd(read_staged_data("build_metadata_module"))

          module_build_service.utils.import_mmd(db_session, metadata_mmd)

  

-         build_one = db_session.query(models.ModuleBuild).get(2)

+         build_one = models.ModuleBuild.get_by_id(db_session, 2)

          mmd = build_one.mmd()

          deps = mmd.get_dependencies()[0]

          deps.add_buildtime_stream("build", "product1.2")
@@ -511,18 +508,20 @@ 

          release = module_build_service.utils.get_rpm_release(build_one)

          assert release == "module+product12+2+814cfa39"

  

-     def test_get_rpm_release_mse_scratch(self):

+     def test_get_rpm_release_mse_scratch(self, db_session):

          init_data(contexts=True, scratch=True)

-         build_one = models.ModuleBuild.query.get(2)

-         build_two = models.ModuleBuild.query.get(3)

+ 

+         build_one = models.ModuleBuild.get_by_id(db_session, 2)

          release_one = module_build_service.utils.get_rpm_release(build_one)

-         release_two = module_build_service.utils.get_rpm_release(build_two)

          assert release_one == "scrmod+2+b8645bbb"

+ 

+         build_two = models.ModuleBuild.get_by_id(db_session, 3)

+         release_two = module_build_service.utils.get_rpm_release(build_two)

          assert release_two == "scrmod+2+17e35784"

  

      def test_get_rpm_release_platform_stream_scratch(self, db_session):

          scheduler_init_data(db_session, 1, scratch=True)

-         build_one = db_session.query(models.ModuleBuild).get(2)

+         build_one = models.ModuleBuild.get_by_id(db_session, 2)

          release = module_build_service.utils.get_rpm_release(build_one)

          assert release == "scrmod+f28+2+814cfa39"

  
@@ -530,7 +529,7 @@ 

      def test_record_module_build_arches(self, get_build_arches, db_session):

          get_build_arches.return_value = ["x86_64", "i686"]

          scheduler_init_data(db_session, 1)

-         build = db_session.query(models.ModuleBuild).get(2)

+         build = models.ModuleBuild.get_by_id(db_session, 2)

          build.arches = []

          module_build_service.utils.record_module_build_arches(build.mmd(), build, db_session)

  
@@ -565,7 +564,7 @@ 

              return hashes_returned[ref]

  

          mocked_scm.return_value.get_latest = mocked_get_latest

-         mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "testmodule.yaml"))

+         mmd = load_mmd(read_staged_data("testmodule"))

          # Modify the component branches so we can identify them later on

          mmd.get_rpm_component("perl-Tangerine").set_ref("f28")

          mmd.get_rpm_component("tangerine").set_ref("f27")
@@ -597,7 +596,7 @@ 

          mmd_xmd = mmd.get_xmd()

          assert mmd_xmd == xmd

  

-     def test_get_reusable_component_shared_userspace_ordering(self):

+     def test_get_reusable_component_shared_userspace_ordering(self, db_session):

          """

          For modules with lot of components per batch, there is big chance that

          the database will return them in different order than what we have for
@@ -605,7 +604,7 @@ 

          reuse the components.

          """

          reuse_shared_userspace_init_data()

-         new_module = models.ModuleBuild.query.get(3)

+         new_module = models.ModuleBuild.get_by_id(db_session, 3)

          rv = module_build_service.utils.get_reusable_component(db.session, new_module, "llvm")

          assert rv.package == "llvm"

  
@@ -724,8 +723,7 @@ 

                  "fbed359411a1baa08d4a88e0d12d426fbf8f602c",

              ]

  

-             testmodule_mmd_path = path.join(BASE_DIR, "..", "staged_data", "testmodule.yaml")

-             mmd = load_mmd_file(testmodule_mmd_path)

+             mmd = load_mmd(read_staged_data("testmodule"))

              mmd = mmd.copy("testmodule-variant", "master")

              module_build = module_build_service.models.ModuleBuild()

              module_build.name = "testmodule-variant"
@@ -770,8 +768,7 @@ 

                  "dbed259411a1baa08d4a88e0d12d426fbf8f6037",

              ]

  

-             testmodule_mmd_path = path.join(BASE_DIR, "..", "staged_data", "testmodule.yaml")

-             mmd = load_mmd_file(testmodule_mmd_path)

+             mmd = load_mmd(read_staged_data("testmodule"))

              # Set the module name and stream

              mmd = mmd.copy("testmodule", "master")

              module_build = module_build_service.models.ModuleBuild()
@@ -811,7 +808,7 @@ 

                  "dbed259411a1baa08d4a88e0d12d426fbf8f6037",

              ]

  

-             testmodule_mmd_path = path.join(BASE_DIR, "..", "staged_data", "testmodule.yaml")

+             testmodule_mmd_path = staged_data_filename("testmodule.yaml")

              test_archs = ["powerpc", "i486"]

  

              mmd1 = load_mmd_file(testmodule_mmd_path)
@@ -837,26 +834,23 @@ 

  

      @patch("module_build_service.scm.SCM")

      @patch("module_build_service.utils.submit.ThreadPool")

-     def test_format_mmd_update_time_modified(self, tp, mocked_scm):

-         with app.app_context():

-             init_data()

-             build = models.ModuleBuild.query.get(2)

- 

-             async_result = mock.MagicMock()

-             async_result.ready.side_effect = [False, False, False, True]

-             tp.return_value.map_async.return_value = async_result

+     def test_format_mmd_update_time_modified(self, tp, mocked_scm, db_session):

+         init_data()

+         build = models.ModuleBuild.get_by_id(db_session, 2)

  

-             test_datetime = datetime(2019, 2, 14, 11, 11, 45, 42968)

+         async_result = mock.MagicMock()

+         async_result.ready.side_effect = [False, False, False, True]

+         tp.return_value.map_async.return_value = async_result

  

-             testmodule_mmd_path = path.join(BASE_DIR, "..", "staged_data", "testmodule.yaml")

+         test_datetime = datetime(2019, 2, 14, 11, 11, 45, 42968)

  

-             mmd1 = load_mmd_file(testmodule_mmd_path)

+         mmd = load_mmd(read_staged_data("testmodule"))

  

-             with patch("module_build_service.utils.submit.datetime") as dt:

-                 dt.utcnow.return_value = test_datetime

-                 module_build_service.utils.format_mmd(mmd1, None, build, db.session)

+         with patch("module_build_service.utils.submit.datetime") as dt:

+             dt.utcnow.return_value = test_datetime

+             module_build_service.utils.format_mmd(mmd, None, build, db_session)

  

-             assert build.time_modified == test_datetime

+         assert build.time_modified == test_datetime

  

      def test_generate_koji_tag_in_nsvc_format(self):

          name, stream, version, context = ("testmodule", "master", "20170816080815", "37c6c57")
@@ -902,13 +896,13 @@ 

  

      def test_get_prefixed_version_f28(self, db_session):

          scheduler_init_data(db_session, 1)

-         build_one = db_session.query(models.ModuleBuild).get(2)

+         build_one = models.ModuleBuild.get_by_id(db_session, 2)

          v = module_build_service.utils.submit.get_prefixed_version(build_one.mmd())

          assert v == 2820180205135154

  

      def test_get_prefixed_version_fl701(self, db_session):

          scheduler_init_data(db_session, 1)

-         build_one = db_session.query(models.ModuleBuild).get(2)

+         build_one = models.ModuleBuild.get_by_id(db_session, 2)

          mmd = build_one.mmd()

          xmd = mmd.get_xmd()

          xmd["mbs"]["buildrequires"]["platform"]["stream"] = "fl7.0.1-beta"
@@ -1304,7 +1298,7 @@ 

  @patch(

      "module_build_service.config.Config.mock_resultsdir",

      new_callable=mock.PropertyMock,

-     return_value=path.join(BASE_DIR, "..", "staged_data", "local_builds"),

+     return_value=staged_data_filename("local_builds")

  )

  @patch(

      "module_build_service.config.Config.system", new_callable=mock.PropertyMock, return_value="mock"
@@ -1404,8 +1398,7 @@ 

          with patch("dnf.Base") as dnf_base:

              repo = mock.MagicMock()

              repo.repofile = "/etc/yum.repos.d/foo.repo"

-             tm_path = path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml")

-             mmd = load_mmd_file(tm_path)

+             mmd = load_mmd(read_staged_data("formatted_testmodule"))

              repo.get_metadata_content.return_value = mmd_to_str(mmd)

              base = dnf_base.return_value

              base.repos = {"reponame": repo}
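A note on the two helper styles used in the hunks above: read_staged_data(name) returns the staged YAML as text, so it pairs with load_mmd, while staged_data_filename("name.yaml") returns the on-disk path, so it keeps working with load_mmd_file, copyfile, or basename/splitext. A minimal sketch of the two equivalent call forms, assuming only what the updated imports in this diff already show (both helpers exported from the tests package):

    from module_build_service.utils.general import load_mmd, load_mmd_file
    from tests import read_staged_data, staged_data_filename

    # Parse the fixture from its text content (the form this patch switches most call sites to).
    mmd_from_text = load_mmd(read_staged_data("testmodule"))

    # Parse the same fixture from its path (kept where the path itself is still needed).
    mmd_from_file = load_mmd_file(staged_data_filename("testmodule.yaml"))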

@@ -18,15 +18,13 @@
  # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  # SOFTWARE.

- import os
- 
  from mock import patch, PropertyMock
  import pytest

  import module_build_service.utils
  from module_build_service import Modulemd
  from module_build_service.errors import StreamAmbigous
- from tests import db, clean_database, make_module, init_data, base_dir
+ from tests import db, clean_database, make_module, init_data, read_staged_data


  class TestUtilsModuleStreamExpansion:
@@ -407,8 +405,7 @@
      def test__get_base_module_mmds(self):
          """Ensure the correct results are returned without duplicates."""
          init_data(data_size=1, multiple_stream_versions=True)
-         mmd = module_build_service.utils.load_mmd_file(
-             os.path.join(base_dir, "staged_data", "testmodule_v2.yaml"))
+         mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2.yaml"))
          deps = mmd.get_dependencies()[0]
          new_deps = Modulemd.Dependencies()
          for stream in deps.get_runtime_streams("platform"):
@@ -432,8 +429,7 @@
      def test__get_base_module_mmds_virtual_streams(self, virtual_streams, db_session):
          """Ensure the correct results are returned without duplicates."""
          init_data(data_size=1, multiple_stream_versions=True)
-         mmd = module_build_service.utils.load_mmd_file(
-             os.path.join(base_dir, "staged_data", "testmodule_v2.yaml"))
+         mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2"))
          deps = mmd.get_dependencies()[0]
          new_deps = Modulemd.Dependencies()
          for stream in deps.get_runtime_streams("platform"):
@@ -465,8 +461,7 @@
      def test__get_base_module_mmds_virtual_streams_only_major_versions(self, cfg):
          """Ensure the correct results are returned without duplicates."""
          init_data(data_size=1, multiple_stream_versions=["foo28", "foo29", "foo30"])
-         mmd = module_build_service.utils.load_mmd_file(
-             os.path.join(base_dir, "staged_data", "testmodule_v2.yaml"))
+         mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2"))
          deps = mmd.get_dependencies()[0]
          new_deps = Modulemd.Dependencies()
          for stream in deps.get_runtime_streams("platform"):
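The other recurring cleanup in this pull request replaces hand-written primary-key lookups such as db_session.query(models.ModuleBuild).get(2) and ModuleBuild.query.get(8), seen in the hunks above and below, with ModuleBuild.get_by_id(db_session, ...). The real model lives in module_build_service.models and its implementation is not part of this diff; the snippet below is only a hypothetical stand-in on a plain SQLAlchemy 1.x model, sketched to show the shape of the classmethod that the rewritten call sites rely on:

    # Illustrative stand-in only; not the actual MBS ModuleBuild model.
    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class ModuleBuild(Base):
        __tablename__ = "module_builds"
        id = Column(Integer, primary_key=True)
        name = Column(String)

        @classmethod
        def get_by_id(cls, db_session, module_build_id):
            # The same primary-key lookup the old call sites spelled out by hand.
            return db_session.query(cls).get(module_build_id)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    db_session = sessionmaker(bind=engine)()
    db_session.add(ModuleBuild(id=2, name="testmodule"))
    db_session.commit()

    # Old style (removed by this diff) and new style (added) return the same object.
    assert db_session.query(ModuleBuild).get(2) is ModuleBuild.get_by_id(db_session, 2)

Keeping the session an explicit argument also matches how the tests pass either db.session or the db_session fixture, depending on the test.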

file modified
+25 -36
@@ -35,7 +35,7 @@
  import re
  import sqlalchemy

- from tests import app, init_data, clean_database, reuse_component_init_data
+ from tests import app, init_data, clean_database, reuse_component_init_data, staged_data_filename
  from tests import read_staged_data
  from tests.test_scm import base_dir as scm_base_dir
  from module_build_service.errors import UnprocessableEntity
@@ -44,7 +44,7 @@
  import module_build_service.config as mbs_config
  import module_build_service.scheduler.handlers.modules
  from module_build_service.utils.general import (
-     import_mmd, mmd_to_str, to_text_type, load_mmd_file, load_mmd
+     import_mmd, mmd_to_str, load_mmd
  )


@@ -114,8 +114,7 @@

          self.sourcedir = path.join(temp_dir, self.name)
          mkdir(self.sourcedir)
-         base_dir = path.abspath(path.dirname(__file__))
-         copyfile(path.join(base_dir, "..", "staged_data", mmd_filename), self.get_module_yaml())
+         copyfile(staged_data_filename(mmd_filename), self.get_module_yaml())

          self.checkout_id += 1

@@ -204,8 +203,7 @@
          assert data["build_context"] is None
          assert data["runtime_context"] is None
          assert data["id"] == 2
-         with open(path.join(base_dir, "staged_data", "nginx_mmd.yaml")) as mmd:
-             assert data["modulemd"] == to_text_type(mmd.read())
+         assert data["modulemd"] == read_staged_data("nginx_mmd")
          assert data["name"] == "nginx"
          assert data["owner"] == "Moe Szyslak"
          assert data["rebuild_strategy"] == "changed-and-after"
@@ -760,7 +758,7 @@

      def test_query_builds_order_by_multiple(self):
          init_data(data_size=1, multiple_stream_versions=True)
-         platform_f28 = db.session.query(module_build_service.models.ModuleBuild).get(1)
+         platform_f28 = module_build_service.models.ModuleBuild.get_by_id(db.session, 1)
          platform_f28.version = "150"
          db.session.add(platform_f28)
          db.session.commit()
@@ -829,7 +827,7 @@

      def test_query_base_module_br_filters(self):
          reuse_component_init_data()
-         mmd = load_mmd_file(path.join(base_dir, "staged_data", "platform.yaml"))
+         mmd = load_mmd(read_staged_data("platform"))
          mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")
          import_mmd(db.session, mmd)
          platform_f300103 = ModuleBuild.query.filter_by(stream="f30.1.3").one()
@@ -947,7 +945,7 @@
          module_build_service.utils.load_mmd(data["modulemd"])

          # Make sure the buildrequires entry was created
-         module = ModuleBuild.query.get(8)
+         module = ModuleBuild.get_by_id(db.session, 8)
          assert len(module.buildrequires) == 1
          assert module.buildrequires[0].name == "platform"
          assert module.buildrequires[0].stream == "f28"
@@ -2084,7 +2082,7 @@
          module_build_service.utils.load_mmd(data["modulemd"])

          # Make sure the buildrequires entry was created
-         module = ModuleBuild.query.get(8)
+         module = ModuleBuild.get_by_id(db.session, 8)
          assert len(module.buildrequires) == 1
          assert module.buildrequires[0].name == "platform"
          assert module.buildrequires[0].stream == "f28"
@@ -2138,18 +2136,15 @@
      def test_submit_scratch_build_with_mmd(
          self, mocked_allow_yaml, mocked_allow_scratch, mocked_get_user, api_version
      ):
-         base_dir = path.abspath(path.dirname(__file__))
-         mmd_path = path.join(base_dir, "..", "staged_data", "testmodule.yaml")
-         post_url = "/module-build-service/{0}/module-builds/".format(api_version)
-         with open(mmd_path, "rb") as f:
-             modulemd = f.read().decode("utf-8")
+         modulemd = read_staged_data("testmodule")

          post_data = {
              "branch": "master",
              "scratch": True,
              "modulemd": modulemd,
-             "module_name": str(splitext(basename(mmd_path))[0]),
+             "module_name": str(splitext(basename(staged_data_filename("testmodule")))[0]),
          }
+         post_url = "/module-build-service/{0}/module-builds/".format(api_version)
          rv = self.client.post(post_url, data=json.dumps(post_data))
          data = json.loads(rv.data)

@@ -2183,7 +2178,7 @@
          module_build_service.utils.load_mmd(data["modulemd"])

          # Make sure the buildrequires entry was created
-         module = ModuleBuild.query.get(8)
+         module = ModuleBuild.get_by_id(db.session, 8)
          assert len(module.buildrequires) == 1
          assert module.buildrequires[0].name == "platform"
          assert module.buildrequires[0].stream == "f28"
@@ -2205,13 +2200,12 @@
      def test_submit_scratch_build_with_mmd_no_module_name(
          self, mocked_allow_yaml, mocked_allow_scratch, mocked_get_user
      ):
-         base_dir = path.abspath(path.dirname(__file__))
-         mmd_path = path.join(base_dir, "..", "staged_data", "testmodule.yaml")
+         post_data = {
+             "branch": "master",
+             "scratch": True,
+             "modulemd": read_staged_data("testmodule")
+         }
          post_url = "/module-build-service/1/module-builds/"
-         with open(mmd_path, "rb") as f:
-             modulemd = f.read().decode("utf-8")
- 
-         post_data = {"branch": "master", "scratch": True, "modulemd": modulemd}
          rv = self.client.post(post_url, data=json.dumps(post_data))
          assert rv.status_code == 400
          data = json.loads(rv.data)
@@ -2240,18 +2234,13 @@
      def test_submit_scratch_build_with_mmd_yaml_not_allowed(
          self, mocked_allow_yaml, mocked_allow_scratch, mocked_get_user, api_version
      ):
-         base_dir = path.abspath(path.dirname(__file__))
-         mmd_path = path.join(base_dir, "..", "staged_data", "testmodule.yaml")
-         post_url = "/module-build-service/{0}/module-builds/".format(api_version)
-         with open(mmd_path, "rb") as f:
-             modulemd = f.read().decode("utf-8")
- 
          post_data = {
              "branch": "master",
              "scratch": True,
-             "modulemd": modulemd,
-             "module_name": str(splitext(basename(mmd_path))[0]),
+             "modulemd": read_staged_data("testmodule"),
+             "module_name": str(splitext(basename(staged_data_filename("testmodule")))[0]),
          }
+         post_url = "/module-build-service/{0}/module-builds/".format(api_version)
          rv = self.client.post(post_url, data=json.dumps(post_data))
          data = json.loads(rv.data)

@@ -2283,7 +2272,7 @@
          init_data(data_size=1, multiple_stream_versions=True)
          # Create a platform for whatever the override is so the build submission succeeds
          if platform_override:
-             platform_mmd = load_mmd_file(path.join(base_dir, "staged_data", "platform.yaml"))
+             platform_mmd = load_mmd(read_staged_data("platform"))
              platform_mmd = platform_mmd.copy(platform_mmd.get_module_name(), platform_override)
              if platform_override == "el8.0.0":
                  xmd = platform_mmd.get_xmd()
@@ -2330,7 +2319,7 @@
          mocked_regexes.return_value = [r"(?:\-LP\-)(.+)$"]
          init_data(data_size=1, multiple_stream_versions=True)
          # Create a platform for the override so the build submission succeeds
-         platform_mmd = load_mmd_file(path.join(base_dir, "staged_data", "platform.yaml"))
+         platform_mmd = load_mmd(read_staged_data('platform'))
          platform_mmd = platform_mmd.copy(platform_mmd.get_module_name(), "product1.3")
          import_mmd(db.session, platform_mmd)

@@ -2369,7 +2358,7 @@
          versioning and no virtual streams, that the dependency resolution succeeds.
          """
          init_data(data_size=1, multiple_stream_versions=True)
-         platform_mmd = load_mmd_file(path.join(base_dir, "staged_data", "platform.yaml"))
+         platform_mmd = load_mmd(read_staged_data("platform"))
          platform_mmd = platform_mmd.copy(platform_mmd.get_module_name(), "el8.0.0")
          import_mmd(db.session, platform_mmd)

@@ -2427,7 +2416,7 @@
      @patch("module_build_service.scm.SCM")
      def test_submit_build_request_platform_virtual_stream(self, mocked_scm, mocked_get_user):
          # Create a platform with el8.25.0 but with the virtual stream el8
-         mmd = load_mmd_file(path.join(base_dir, "staged_data", "platform.yaml"))
+         mmd = load_mmd(read_staged_data("platform"))
          mmd = mmd.copy(mmd.get_module_name(), "el8.25.0")
          xmd = mmd.get_xmd()
          xmd["mbs"]["virtual_streams"] = ["el8"]
@@ -2562,7 +2551,7 @@
          mock_pp_streams.return_value = pp_streams
          # Mock the Product Pages query
          mock_get.return_value.json.return_value = get_rv
-         mmd = load_mmd_file(path.join(base_dir, "staged_data", "platform.yaml"))
+         mmd = load_mmd(read_staged_data("platform"))
          # Create the required platforms
          for stream in ("el8.0.0", "el8.0.0.z", "el8.2.1", "el8.2.1.z"):
              mmd = mmd.copy(mmd.get_module_name(), stream)