#298 Cache ODCS pulp composes with the same content_sets in record_batches.
Merged 5 years ago by jkaluza. Opened 5 years ago by jkaluza.
jkaluza/freshmaker cache-pulp-compose into master

@@ -503,6 +503,10 @@
          # Used as tmp dict with {brew_build_nvr: ArtifactBuild, ...} mapping.
          builds = builds or {}

+         # Cache for ODCS pulp composes. Key is a whitespace-joined, sorted
+         # list of content_sets. Value is the Compose database object.
+         odcs_cache = {}
+
          for batch in batches:
              for image in batch:
                  nvr = image["brew"]["build"]
@@ -560,14 +564,25 @@
                  if state != ArtifactBuildState.FAILED.value:
                      # Store odcs pulp compose to build
                      if image["generate_pulp_repos"]:
-                         compose = self._prepare_pulp_repo(
-                             build, image["content_sets"])
-                         if build.state != ArtifactBuildState.FAILED.value:
-                             db_compose = Compose(odcs_compose_id=compose['id'])
-                             db.session.add(db_compose)
-                             db.session.commit()
-                             build.add_composes(db.session, [db_compose])
+                         # Check if the compose for these content_sets is
+                         # already cached and use it in this case.
+                         cache_key = " ".join(sorted(image["content_sets"]))
+                         if cache_key in odcs_cache:
+                             db_compose = odcs_cache[cache_key]
                          else:
+                             compose = self._prepare_pulp_repo(
+                                 build, image["content_sets"])
+
+                             if build.state != ArtifactBuildState.FAILED.value:
+                                 db_compose = Compose(odcs_compose_id=compose['id'])
+                                 db.session.add(db_compose)
+                                 db.session.commit()
+                                 odcs_cache[cache_key] = db_compose
+                             else:
+                                 db_compose = None
+                                 db.session.commit()
+                         if db_compose:
+                             build.add_composes(db.session, [db_compose])
                              db.session.commit()

                      # TODO: uncomment following code after boot.iso compose is

@@ -805,7 +805,7 @@
              side_effect=[{'id': 100}, {'id': 200}])

          self.patcher.patch_dict(
-             'freshmaker.models.EVENT_TYPES', {self.mock_event.__class__: -1})
+             'freshmaker.models.EVENT_TYPES', {self.mock_event.__class__: 0})

      def tearDown(self):
          super(TestRecordBatchesImages, self).tearDown()
@@ -1028,6 +1028,95 @@
          self.mock_request_boot_iso_compose.assert_called_once_with(
              batches[0][0])

+     def test_pulp_compose_generated_just_once(self):
+         batches = [
+             [ContainerImage({
+                 "brew": {
+                     "completion_date": "20170420T17:05:37.000-0400",
+                     "build": "rhel-server-docker-7.3-82",
+                     "package": "rhel-server-docker"
+                 },
+                 'parsed_data': {
+                     'layers': [
+                         'sha512:12345678980',
+                         'sha512:10987654321'
+                     ]
+                 },
+                 "parent": None,
+                 "content_sets": ["content-set-1"],
+                 "repository": "repo-1",
+                 "commit": "123456789",
+                 "target": "target-candidate",
+                 "git_branch": "rhel-7",
+                 "error": None,
+                 "arches": "x86_64",
+                 "generate_pulp_repos": True,
+             })],
+             [ContainerImage({
+                 "brew": {
+                     "build": "rh-dotnetcore10-docker-1.0-16",
+                     "package": "rh-dotnetcore10-docker",
+                     "completion_date": "20170511T10:06:09.000-0400"
+                 },
+                 'parsed_data': {
+                     'layers': [
+                         'sha512:2345af2e293',
+                         'sha512:12345678980',
+                         'sha512:10987654321'
+                     ]
+                 },
+                 "parent": ContainerImage({
+                     "brew": {
+                         "completion_date": "20170420T17:05:37.000-0400",
+                         "build": "rhel-server-docker-7.3-82",
+                         "package": "rhel-server-docker"
+                     },
+                     'parsed_data': {
+                         'layers': [
+                             'sha512:12345678980',
+                             'sha512:10987654321'
+                         ]
+                     },
+                     "parent": None,
+                     "content_sets": ["content-set-1"],
+                     "repository": "repo-1",
+                     "commit": "123456789",
+                     "target": "target-candidate",
+                     "git_branch": "rhel-7",
+                     "error": None
+                 }),
+                 "content_sets": ["content-set-1"],
+                 "repository": "repo-1",
+                 "commit": "987654321",
+                 "target": "target-candidate",
+                 "git_branch": "rhel-7",
+                 "error": None,
+                 "arches": "x86_64",
+                 "generate_pulp_repos": True,
+             })]
+         ]
+
+         handler = ErrataAdvisoryRPMsSignedHandler()
+         handler._record_batches(batches, self.mock_event)
+
+         query = db.session.query(ArtifactBuild)
+         parent_build = query.filter(
+             ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82'
+         ).first()
+         self.assertEqual(1, len(parent_build.composes))
+         compose_ids = sorted([rel.compose.odcs_compose_id
+                               for rel in parent_build.composes])
+         self.assertEqual([100], compose_ids)
+
+         child_build = query.filter(
+             ArtifactBuild.original_nvr == 'rh-dotnetcore10-docker-1.0-16'
+         ).first()
+         self.assertEqual(1, len(child_build.composes))
+
+         self.mock_prepare_pulp_repo.assert_has_calls([
+             call(parent_build, ["content-set-1"])
+         ])
+
      def test_no_parent(self):
          batches = [
              [ContainerImage({

For advisories with a lot of images, we end up asking for the same ODCS pulp compose again and again. Instead, we can just reuse the compose that was already requested.
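
To illustrate the idea outside of Freshmaker's handler, here is a minimal, self-contained sketch of the caching pattern, assuming a `request_pulp_compose` callable as a hypothetical stand-in for the ODCS call made by `_prepare_pulp_repo()`:

```python
# Minimal sketch of the caching idea behind this PR: request at most one pulp
# compose per unique set of content_sets. request_pulp_compose is a
# hypothetical stand-in for the ODCS call made by _prepare_pulp_repo().
def record_composes(images, request_pulp_compose):
    odcs_cache = {}  # "cs-1 cs-2" -> compose dict returned by ODCS
    results = []
    for image in images:
        # Sort first so that ["a", "b"] and ["b", "a"] share one cache entry.
        cache_key = " ".join(sorted(image["content_sets"]))
        if cache_key not in odcs_cache:
            odcs_cache[cache_key] = request_pulp_compose(image["content_sets"])
        results.append((image["brew"]["build"], odcs_cache[cache_key]))
    return results


if __name__ == "__main__":
    calls = []

    def fake_request(content_sets):
        calls.append(list(content_sets))
        return {"id": len(calls)}

    images = [
        {"brew": {"build": "image-a-1-1"}, "content_sets": ["cs-1", "cs-2"]},
        {"brew": {"build": "image-b-2-1"}, "content_sets": ["cs-2", "cs-1"]},
    ]
    print(record_composes(images, fake_request))  # both images share compose id 1
    print(len(calls))                             # 1 -> ODCS is asked only once
```

Because the key is built from the sorted content_sets, images that list the same sets in a different order still share a single compose request.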

Suggestion: why not save it to the cache right after the call to self._prepare_pulp_repo?

Could this db_compose be cached as well, together with the associated pulp compose? That would save the cost of a few SELECT queries.
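
For illustration, a sketch of what these two suggestions amount to: cache the Compose database object itself, storing it right after `_prepare_pulp_repo` returns. The helper below is hypothetical; the `Compose`, `db.session` and `_prepare_pulp_repo` names mirror the patch, and they are passed in as parameters only to keep the sketch self-contained.

```python
# Hypothetical helper sketching the suggested flow: a cache hit returns the
# already-created Compose DB object (no extra SELECT); a cache miss asks ODCS
# once via prepare_pulp_repo and caches the new Compose right after creating it.
def get_or_create_pulp_compose(build, content_sets, odcs_cache,
                               db_session, compose_cls, prepare_pulp_repo):
    cache_key = " ".join(sorted(content_sets))
    db_compose = odcs_cache.get(cache_key)
    if db_compose is None:
        compose = prepare_pulp_repo(build, content_sets)
        db_compose = compose_cls(odcs_compose_id=compose["id"])
        db_session.add(db_compose)
        db_session.commit()
        # Cache the database object, not just the ODCS response dict.
        odcs_cache[cache_key] = db_compose
    return db_compose
```

Caching the database object directly is what the follow-up commit below does; with only the ODCS compose dict cached, every cache hit would still need a query to look up the matching Compose row.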

Looks good to me generally. Two minor suggestions.

1 new commit added

  • Cache the Compose database object instead of ODCS compose dict.
5 years ago

@cqi: I've updated the PR to address your comments, please check :).

Looks good to me. :thumbsup:

Commit a755b24 fixes this pull-request

Pull-Request has been merged by jkaluza

5 years ago
