#385 koji_block_retired: fix branches and koji tags
Closed a month ago by lenkaseg. Opened a month ago by lenkaseg.
fedora-infra/ lenkaseg/toddlers fix_koji_tags  into  main

@@ -723,3 +723,335 @@ 

              )

          else:

              self.mock_ipa_session.Command.group_remove_member_manager.assert_not_called()

+ 

+     @patch("toddlers.utils.notify.send_email")

+     def test_email_notification_sent_with_creator_failures(

+         self, mock_send_email, caplog

+     ):

+         """

+         Test that email notification is sent when creator removal failures occur.

+         """

+         self.mock_distgit.get_all_groups.return_value = ["dotnet-sig", "python-sig"]

+         self.mock_distgit.get_group_members.side_effect = [

+             ["rhea"],  # dotnet-sig

+             ["alice"],  # python-sig

+         ]

+ 

+         # Mock IPA to show users are not packagers

+         self.mock_ipa_session.Command.group_show.return_value = {

+             "result": {"member_user": [], "membermanager_user": []}

+         }

+ 

+         # Mock Pagure to raise creator errors for both users

+         creator_error = PagureError("The creator of a group cannot be removed")

+         self.mock_distgit.remove_member_from_group.side_effect = creator_error

+ 

+         config = {

+             "watched_groups": ["packager"],

+             "notify_emails": ["admin@example.com", "team@example.com"],

+             "admin_email": "toddlers@example.com",

+             "mail_server": "smtp.example.com",

+         }

+         caplog.set_level(logging.INFO)

+         self.toddler_cls.find_and_remove(config)

+ 

+         # Verify email was sent

+         mock_send_email.assert_called_once()

+         call_args = mock_send_email.call_args

+ 

+         # Verify email parameters

+         assert call_args.kwargs["to_addresses"] == [

+             "admin@example.com",

+             "team@example.com",

+         ]

+         assert call_args.kwargs["from_address"] == "toddlers@example.com"

+         assert (

+             call_args.kwargs["subject"]

+             == "Toddlers: Manual removal required for group creators"

+         )

+         assert call_args.kwargs["mail_server"] == "smtp.example.com"

+ 

+         # Verify email content includes both failures

+         email_content = call_args.kwargs["content"]

+         assert "dotnet-sig: rhea" in email_content

+         assert "python-sig: alice" in email_content

+         assert (

+             "could not be removed automatically because they are group creators"

+             in email_content

+         )

+         assert "Please remove these users manually" in email_content

+ 

+         # Verify log message

+         assert (

+             "Sending email notification about users requiring manual removal"

+             in caplog.text

+         )

+         assert "Email notification sent successfully" in caplog.text

+ 

+     @patch("toddlers.utils.notify.send_email")

+     def test_email_notification_not_sent_without_notify_emails(

+         self, mock_send_email, caplog

+     ):

+         """

+         Test that email notification is not sent when notify_emails is not configured.

+         """

+         self.mock_distgit.get_all_groups.return_value = ["group1"]

+         self.mock_distgit.get_group_members.return_value = ["creator_user"]

+ 

+         # Mock IPA to show user is not a packager

+         self.mock_ipa_session.Command.group_show.return_value = {

+             "result": {"member_user": [], "membermanager_user": []}

+         }

+ 

+         # Mock Pagure to raise creator error

+         creator_error = PagureError("The creator of a group cannot be removed")

+         self.mock_distgit.remove_member_from_group.side_effect = creator_error

+ 

+         config = {

+             "watched_groups": ["packager"],

+             # notify_emails not configured

+             "admin_email": "toddlers@example.com",

+             "mail_server": "smtp.example.com",

+         }

+         caplog.set_level(logging.INFO)

+         self.toddler_cls.find_and_remove(config)

+ 

+         # Verify email was NOT sent

+         mock_send_email.assert_not_called()

+ 

+         # Verify warning about creator failures is still logged

+         assert "The following users could not be removed automatically" in caplog.text

+ 

+     @patch("toddlers.utils.notify.send_email")

+     def test_email_notification_not_sent_without_creator_failures(

+         self, mock_send_email, caplog

+     ):

+         """

+         Test that email notification is not sent when there are no creator failures.

+         """

+         self.mock_distgit.get_all_groups.return_value = ["group1"]

+         self.mock_distgit.get_group_members.return_value = ["regular_user"]

+ 

+         # Mock IPA to show user is not a packager

+         self.mock_ipa_session.Command.group_show.return_value = {

+             "result": {"member_user": [], "membermanager_user": []}

+         }

+ 

+         # Mock successful removal (no error)

+         self.mock_distgit.remove_member_from_group.return_value = None

+ 

+         config = {

+             "watched_groups": ["packager"],

+             "notify_emails": ["admin@example.com"],

+             "admin_email": "toddlers@example.com",

+             "mail_server": "smtp.example.com",

+         }

+         caplog.set_level(logging.INFO)

+         self.toddler_cls.find_and_remove(config)

+ 

+         # Verify email was NOT sent

+         mock_send_email.assert_not_called()

+ 

+         # Verify no warning about creator failures

+         assert (

+             "The following users could not be removed automatically" not in caplog.text

+         )

+ 

+     @patch("toddlers.utils.notify.send_email")

+     def test_email_notification_error_handled(self, mock_send_email, caplog):

+         """

+         Test that errors sending email notifications are caught and logged.

+         """

+         self.mock_distgit.get_all_groups.return_value = ["group1"]

+         self.mock_distgit.get_group_members.return_value = ["creator_user"]

+ 

+         # Mock IPA to show user is not a packager

+         self.mock_ipa_session.Command.group_show.return_value = {

+             "result": {"member_user": [], "membermanager_user": []}

+         }

+ 

+         # Mock Pagure to raise creator error

+         creator_error = PagureError("The creator of a group cannot be removed")

+         self.mock_distgit.remove_member_from_group.side_effect = creator_error

+ 

+         # Mock send_email to raise an exception

+         mock_send_email.side_effect = Exception("SMTP connection failed")

+ 

+         config = {

+             "watched_groups": ["packager"],

+             "notify_emails": ["admin@example.com"],

+             "admin_email": "toddlers@example.com",

+             "mail_server": "smtp.example.com",

+         }

+         caplog.set_level(logging.INFO)

+         self.toddler_cls.find_and_remove(config)

+ 

+         # Verify email was attempted

+         mock_send_email.assert_called_once()

+ 

+         # Verify error was logged

+         assert (

+             "Failed to send email notification: SMTP connection failed" in caplog.text

+         )

+ 

+         # Verify process completed despite email error

+         assert "Completed find_and_remove process" in caplog.text

+ 

+     @pytest.mark.parametrize(

+         "distgit_groups,ignored_groups,expected_processed_groups,expected_log",

+         [

+             (

+                 ["group1", "group2"],

+                 [],

+                 ["group1", "group2"],

+                 None,

+             ),

+             (

+                 ["group1", "ignored-group", "group2"],

+                 ["ignored-group"],

+                 ["group1", "group2"],

+                 "Skipping ignored group ignored-group",

+             ),

+         ],

+         ids=["no_ignored_groups", "with_ignored_groups"],

+     )

+     def test_find_and_remove_with_ignored_groups(

+         self,

+         distgit_groups,

+         ignored_groups,

+         expected_processed_groups,

+         expected_log,

+         caplog,

+     ):

+         """

+         Test that ignored_groups are properly handled in find_and_remove.

+         """

+         self.mock_distgit.get_all_groups.return_value = distgit_groups

+         self.mock_distgit.get_group_members.return_value = ["user1"]

+ 

+         # Mock IPA to show users are not packagers

+         self.mock_ipa_session.Command.group_show.return_value = {

+             "result": {"member_user": [], "membermanager_user": []}

+         }

+ 

+         config = {"watched_groups": ["packager"], "ignored_groups": ignored_groups}

+         caplog.set_level(logging.INFO)

+         self.toddler_cls.find_and_remove(config)

+ 

+         # Verify get_group_members was called for expected groups only

+         assert self.mock_distgit.get_group_members.call_count == len(

+             expected_processed_groups

+         )

+         for group in expected_processed_groups:

+             self.mock_distgit.get_group_members.assert_any_call(group)

+ 

+         # Verify log message if expected

+         if expected_log:

+             assert expected_log in caplog.text

+ 

+     @pytest.mark.parametrize(

+         "distgit_groups,ignored_groups,ipa_member_groups,expected_removals,expected_log",

+         [

+             (

+                 ["group1", "group2"],

+                 [],

+                 ["group1", "group2"],

+                 ["packager", "group1", "group2"],

+                 None,

+             ),

+             (

+                 ["group1", "ignored-group", "group2"],

+                 ["ignored-group"],

+                 ["group1", "ignored-group", "group2"],

+                 ["packager", "group1", "group2"],

+                 "Filtered out ignored groups: ['ignored-group']",

+             ),

+         ],

+         ids=["no_ignored_groups", "with_ignored_groups"],

+     )

+     def test_remove_user_with_ignored_groups(

+         self,

+         distgit_groups,

+         ignored_groups,

+         ipa_member_groups,

+         expected_removals,

+         expected_log,

+         caplog,

+     ):

+         """

+         Test that ignored_groups are filtered in remove_user method.

+         """

+         self.mock_distgit.get_all_groups.return_value = distgit_groups

+ 

+         # Mock IPA user info

+         self.mock_ipa_session.Command.user_show.return_value = {

+             "result": {"memberof_group": ipa_member_groups}

+         }

+ 

+         # Mock group_show to return empty (user should be removed)

+         self.mock_ipa_session.Command.group_show.return_value = {

+             "result": {"member_user": [], "membermanager_user": []}

+         }

+ 

+         self.mock_distgit.remove_member_from_group.return_value = None

+ 

+         config = {"watched_groups": ["packager"], "ignored_groups": ignored_groups}

+ 

+         # Create a mock message

+         from unittest.mock import MagicMock

+ 

+         message = MagicMock()

+         message.group = "packager"

+         message.user_names = ["testuser"]

+ 

+         caplog.set_level(logging.INFO)

+         self.toddler_cls.remove_user(config, message)

+ 

+         # Verify user was removed from expected groups

+         assert self.mock_distgit.remove_member_from_group.call_count == len(

+             expected_removals

+         )

+         for group in expected_removals:

+             self.mock_distgit.remove_member_from_group.assert_any_call(

+                 "testuser", group

+             )

+ 

+         # Verify log message if expected

+         if expected_log:

+             assert expected_log in caplog.text

+ 

+     @pytest.mark.parametrize(

+         "triggering_group,ignored_groups,should_skip",

+         [

+             ("packager", [], False),

+             ("packager", ["packager"], True),

+         ],

+         ids=["not_ignored", "ignored_triggering_group"],

+     )

+     def test_process_removal_with_ignored_groups(

+         self, triggering_group, ignored_groups, should_skip, caplog

+     ):

+         """

+         Test that _process_removal handles ignored triggering groups.

+         """

+         config = {"watched_groups": ["packager"], "ignored_groups": ignored_groups}

+         user = "testuser"

+         distgit_groups = ["group1", "group2"]

+ 

+         caplog.set_level(logging.INFO)

+         self.toddler_cls._process_removal(

+             config, user, triggering_group, distgit_groups

+         )

+ 

+         if should_skip:

+             # Verify it was skipped

+             assert (

+                 f"Skipping removal from ignored group {triggering_group}" in caplog.text

+             )

+             # Verify no removal was attempted

+             self.mock_distgit.remove_member_from_group.assert_not_called()

+             self.mock_ipa_session.Command.user_show.assert_not_called()

+         else:

+             # Verify removal was attempted

+             self.mock_distgit.remove_member_from_group.assert_called()

+             self.mock_ipa_session.Command.user_show.assert_called()

@@ -701,26 +701,41 @@ 

          )

          self.toddler_cls.koji_session.packageListBlock.assert_not_called()

          assert (

-             caplog.records[-4].message

+             caplog.records[-5].message

              == "Checking if package epel_pkg is blocked in koji for release epel9"

          )

-         assert caplog.records[-3].message == "Package epel_pkg is not blocked in koji."

+         assert caplog.records[-4].message == "Package epel_pkg is not blocked in koji."

  

          assert (

-             caplog.records[-2].message

+             caplog.records[-3].message

              == "Last check before blocking: is package epel_pkg really retired on epel9?"

          )

          assert (

-             caplog.records[-1].message

+             caplog.records[-2].message

              == "Package epel_pkg is not retired on the distgit branch epel9, "

              "was probably unretired recently, bailing."

          )

+         assert (

+             caplog.records[-1].message

+             == "All packages that should be blocked in this run: {}"

+         )

  

+     @pytest.mark.parametrize(

+         "active_branches,branch,pkg,tag,uses_get_tag",

+         [

+             # EPEL branch: uses get_tag_from_target

+             (["epel9"], "epel9", "epel_pkg", "epel9", True),

+             # Fedora branch (not rawhide): koji_tag = active_release directly

+             # Include rawhide so f41 is preserved by adjust_releases_for_lookaside

+             (["f41", "f42", "rawhide"], "f41", "fedora_pkg", "f41", False),

+         ],

+         ids=("epel", "fedora"),

+     )

      @patch("requests.get")

      @patch("toddlers.plugins.koji_block_retired.KojiBlockRetired.get_tag_from_target")

      @patch("toddlers.plugins.koji_block_retired.bodhi.set_bodhi")

      @patch("toddlers.plugins.koji_block_retired.KojiBlockRetired._create_session")

-     def test_playtime_call_epel_blocked(

+     def test_playtime_call_blocked(

          self,

          mock_create_session,

          mock_set_bodhi,
@@ -728,32 +743,37 @@ 

          mock_req,

          config,

          caplog,

+         active_branches,

+         branch,

+         pkg,

+         tag,

+         uses_get_tag,

      ):

          mock_bodhi = MagicMock()

-         mock_bodhi.get_active_branches.return_value = [

-             "epel9",

-         ]

+         mock_bodhi.get_active_branches.return_value = active_branches

          mock_bodhi.is_branch_frozen.return_value = False

+         mock_bodhi.is_branch_stable.return_value = False

          mock_set_bodhi.return_value = mock_bodhi

-         mock_tag.return_value = "epel9"

+         mock_tag.return_value = tag

          caplog.set_level(logging.INFO)

          message = MagicMock()

          message.topic = "toddlers.trigger.koji_block_retired"

-         mock_response = MagicMock()

-         mock_response.status_code = 200

-         mock_response.ok = True

-         mock_response.json.return_value = {

-             "epel9": [

-                 "epel_pkg",

-             ]

-         }

+ 

+         # Create mock responses for all branches that will be processed

+         mock_responses = {}

+         for b in active_branches:

+             mock_resp = MagicMock()

+             mock_resp.status_code = 200

+             mock_resp.ok = True

+             # Only the target branch has a package, others are empty

+             mock_resp.json.return_value = {b: [pkg] if b == branch else []}

+             lookaside_url = (

+                 f"https://src.fedoraproject.org/lookaside/retired_in_{b}.json"

+             )

+             mock_responses[lookaside_url] = mock_resp

  

          def req(*args, **kwargs):

-             if (

-                 args[0]

-                 == "https://src.fedoraproject.org/lookaside/retired_in_epel9.json"

-             ):

-                 return mock_response

+             return mock_responses.get(args[0])

  

          mock_req.side_effect = req

          self.toddler_cls.koji_session.listPackages.__name__ = "listPackages"
@@ -764,15 +784,20 @@ 

              }

          ]

          self.toddler_cls.process(config, message)

-         self.toddler_cls.koji_session.listPackages.assert_called_once_with(

-             pkgID="epel_pkg",

-             tagID="epel9",

+         self.toddler_cls.koji_session.listPackages.assert_called_with(

+             pkgID=pkg,

+             tagID=tag,

          )

          self.toddler_cls.koji_session.packageListBlock.assert_not_called()

          assert (

              caplog.records[-1].message

              == "All packages that should be blocked in this run: {}"

          )

+         # get_tag_from_target is exercised in both parametrizations: the EPEL
+         # branch calls it directly, and the Fedora case still reaches it via
+         # the rawhide entry in active_branches.

+         mock_tag.assert_called()

  

      @patch("requests.get")

      @patch("toddlers.plugins.koji_block_retired.KojiBlockRetired.get_tag_from_target")
@@ -902,7 +927,7 @@ 

          )

          assert (

              caplog.records[-6].message

-             == "Last check before blocking: is package fedora_pkg really retired on f42?"

+             == "Last check before blocking: is package fedora_pkg really retired on rawhide?"

          )

          assert caplog.records[-5].message == "Blocking package fedora_pkg, tag: f42"

          assert (
@@ -1188,49 +1213,66 @@ 

          assert caplog.records[-2].message == "Cannot block on stable branches: f41"

  

      @pytest.mark.parametrize(

-         "releases",

+         "releases,expected",

          [

-             [

-                 "epel8",

-                 "epel9",

-                 "epel10",

-                 "f40",

-                 "f41",

-                 "f42",

-                 "eln",

-                 "epel9-next",

-                 "f39",

-                 "f39",

-             ],

-             [

-                 "eln",

-                 "epel10",

-                 "epel8",

-                 "epel9",

-                 "epel9-next",

-                 "f39",

-                 "f39",

-                 "f39",

-                 "f40",

-                 "f40",

-                 "f40",

-                 "f41",

-                 "f41",

-                 "f41",

-                 "rawhide",

-                 "f42",

-                 "f42",

-             ],

+             # without_rawhide: f42 (highest) is replaced with rawhide

+             (

+                 [

+                     "epel8",

+                     "epel9",

+                     "epel10",

+                     "f40",

+                     "f41",

+                     "f42",

+                     "eln",

+                     "epel9-next",

+                     "f39",

+                     "f39",

+                 ],

+                 [

+                     "epel10",

+                     "epel8",

+                     "epel9",

+                     "f39",

+                     "f40",

+                     "f41",

+                     "rawhide",

+                 ],

+             ),

+             # with_rawhide: f42 is preserved (it's a branched release, not rawhide)

+             (

+                 [

+                     "eln",

+                     "epel10",

+                     "epel8",

+                     "epel9",

+                     "epel9-next",

+                     "f39",

+                     "f39",

+                     "f39",

+                     "f40",

+                     "f40",

+                     "f40",

+                     "f41",

+                     "f41",

+                     "f41",

+                     "rawhide",

+                     "f42",

+                     "f42",

+                 ],

+                 [

+                     "epel10",

+                     "epel8",

+                     "epel9",

+                     "f39",

+                     "f40",

+                     "f41",

+                     "f42",

+                     "rawhide",

+                 ],

+             ),

          ],

          ids=("without_rawhide", "with_rawhide"),

      )

-     def test_adjust_releases_for_lookaside(self, releases):

-         assert self.toddler_cls.adjust_releases_for_lookaside(releases) == [

-             "epel10",

-             "epel8",

-             "epel9",

-             "f39",

-             "f40",

-             "f41",

-             "rawhide",

-         ]

+     def test_adjust_releases_for_lookaside(self, releases, expected):

+         assert self.toddler_cls.adjust_releases_for_lookaside(releases) == expected

@@ -134,7 +134,10 @@ 

      @patch("toddlers.utils.pagure.set_pagure")

      @patch("toddlers.utils.fedora_account.set_fasjson")

      @patch("toddlers.utils.bugzilla_system.set_bz")

-     def test_process_exception(self, mock_bugzilla, mock_fasjson, mock_pagure, toddler):

+     @patch("toddlers.utils.anitya.set_anitya")

+     def test_process_exception(

+         self, mock_anitya, mock_bugzilla, mock_fasjson, mock_pagure, toddler

+     ):

          """

          Assert that message toddler will be initialized correctly, if message passes

          initial processing.
@@ -180,12 +183,16 @@ 

          )

          mock_fasjson.assert_called_with(config)

          mock_bugzilla.assert_called_with(config)

+         mock_anitya.assert_called_with(config)

          mock_pagure_io.add_comment_to_issue.assert_called_once()

  

+     @patch("toddlers.utils.anitya.set_anitya")

      @patch("toddlers.utils.pagure.set_pagure")

      @patch("toddlers.utils.fedora_account.set_fasjson")

      @patch("toddlers.utils.bugzilla_system.set_bz")

-     def test_process(self, mock_bugzilla, mock_fasjson, mock_pagure, toddler):

+     def test_process(

+         self, mock_bugzilla, mock_fasjson, mock_pagure, mock_anitya, toddler

+     ):

          """

          Assert that message toddler will be initialized correctly, if message passes

          initial processing.
@@ -227,11 +234,15 @@ 

          )

          mock_fasjson.assert_called_with(config)

          mock_bugzilla.assert_called_with(config)

+         mock_anitya.assert_called_with(config)

  

+     @patch("toddlers.utils.anitya.set_anitya")

      @patch("toddlers.utils.pagure.set_pagure")

      @patch("toddlers.utils.fedora_account.set_fasjson")

      @patch("toddlers.utils.bugzilla_system.set_bz")

-     def test_process_comment(self, mock_bugzilla, mock_fasjson, mock_pagure, toddler):

+     def test_process_comment(

+         self, mock_bugzilla, mock_fasjson, mock_pagure, mock_anitya, toddler

+     ):

          """

          Assert that toddler will handle comments correctly.

          """
@@ -715,7 +726,9 @@ 

          ) as mock_create_new_repo:

              self.toddler.process_comment(issue)

  

-             mock_create_new_repo.assert_called_with(issue, {"action": "new_repo"})

+             mock_create_new_repo.assert_called_with(

+                 issue, {"action": "new_repo"}, initial_commit=True

+             )

  

      def test_process_comment_valid_command_wrong_action(self):

          """
@@ -791,6 +804,7 @@ 

          self.toddler = scm_request_processor.SCMRequestProcessor()

          self.toddler.pagure_io = Mock()

          self.toddler.dist_git = Mock()

+         self.toddler.anitya = Mock()

          self.toddler.ping_comment = "{maintainers}"

  

      def test_process_new_repo_missing_required_key(self):
@@ -800,21 +814,32 @@ 

          issue = {

              "id": 100,

          }

-         self.toddler.process_new_repo(issue, {})

+         json = {

+             "repo": "+a",

+             "branch": "rawhide",

+             "namespace": "rpms",

+             "bug_id": "123",

+             "action": "new_repo",

+             "sls": {"rawhide": "2050-06-01"},

+             "monitor": "monitoring",

+         }

+         self.toddler.process_new_repo(issue, json)

  

          self.toddler.pagure_io.close_issue.assert_called_with(

              100,

              namespace=scm_request_processor.PROJECT_NAMESPACE,

-             message="Invalid body, missing required field: repo",

+             message="Invalid body, missing required field: upstreamurl",

              reason="Invalid",

          )

  

-     def test_process_new_repo_invalid_repo_name(self):

+     def test_process_new_repo_missing_required_key_for_monitor(self):

          """

-         Assert that ticket will be closed if provided repository name is invalid.

+         Assert that ticket will be closed if required key for monitor

+         is missing in request.

          """

-         issue = {"id": 100, "user": {"name": "zlopez"}}

- 

+         issue = {

+             "id": 100,

+         }

          json = {

              "repo": "+a",

              "branch": "rawhide",
@@ -822,40 +847,62 @@ 

              "bug_id": "123",

              "action": "new_repo",

              "sls": {"rawhide": "2050-06-01"},

-             "monitor": "monitor",

+             "monitor": "monitoring",

+             "upstreamurl": "",

+             "backend": "GitLab",

          }

- 

          self.toddler.process_new_repo(issue, json)

  

-         error = (

-             "The repository name is invalid. It must be at least two "

-             "characters long with only letters, numbers, hyphens, "

-             "underscores, plus signs, and/or periods. Please note that "

-             "the project cannot start with a period or a plus sign. "

-             "Repository name can't be longer than 64 characters."

+         self.toddler.pagure_io.close_issue.assert_called_with(

+             100,

+             namespace=scm_request_processor.PROJECT_NAMESPACE,

+             message="Invalid body, missing required field: project_name",

+             reason="Invalid",

          )

  

+     def test_process_new_repo_monitor_accepts_different_options(self):

+         """

+         Assert that a monitor value other than the known options still

+         triggers the anitya field validation, closing the ticket.

+         """

+         issue = {

+             "id": 100,

+         }

+         json = {

+             "repo": "+a",

+             "branch": "rawhide",

+             "namespace": "rpms",

+             "bug_id": "123",

+             "action": "new_repo",

+             "sls": {"rawhide": "2050-06-01"},

+             "monitor": "monitoring11",

+             "upstreamurl": "",

+             "backend": "GitLab",

+         }

+         self.toddler.process_new_repo(issue, json)

+ 

          self.toddler.pagure_io.close_issue.assert_called_with(

              100,

              namespace=scm_request_processor.PROJECT_NAMESPACE,

-             message=error,

+             message="Invalid body, missing required field: project_name",

              reason="Invalid",

          )

  

-     def test_process_new_repo_long_repo_name(self):

+     def test_process_new_repo_invalid_repo_name(self):

          """

          Assert that ticket will be closed if provided repository name is invalid.

          """

          issue = {"id": 100, "user": {"name": "zlopez"}}

  

          json = {

-             "repo": "".join("a" for _ in range(65)),

+             "repo": "+a",

              "branch": "rawhide",

              "namespace": "rpms",

              "bug_id": "123",

              "action": "new_repo",

              "sls": {"rawhide": "2050-06-01"},

-             "monitor": "monitor",

+             "monitor": "no-monitoring",

+             "upstreamurl": "",

          }

  

          self.toddler.process_new_repo(issue, json)
@@ -888,7 +935,8 @@ 

              "bug_id": "",

              "action": "new_repo",

              "sls": {"rawhide": "2050-06-01"},

-             "monitor": "monitor",

+             "monitor": "no-monitoring",

+             "upstreamurl": "",

          }

  

          self.toddler.dist_git.get_project.return_value = None
@@ -917,7 +965,8 @@ 

              "bug_id": "123",

              "action": "new_repo",

              "sls": {"rawhide": "2050-06-01"},

-             "monitor": "monitor",

+             "monitor": "no-monitoring",

+             "upstreamurl": "",

          }

  

          self.toddler.dist_git.get_project.return_value = None
@@ -951,7 +1000,8 @@ 

          bug_id = "123"

          action = "new_repo"

          sls = {"rawhide": "2050-06-01"}

-         monitor = "monitor"

+         monitor = "no-monitoring"

+         upstreamurl = ""

          exception = False

          json = {

              "repo": repo,
@@ -961,6 +1011,7 @@ 

              "action": action,

              "sls": sls,

              "monitor": monitor,

+             "upstreamurl": upstreamurl,

              "exception": exception,

          }

  
@@ -990,7 +1041,8 @@ 

          bug_id = "123"

          action = "new_repo"

          sls = {"rawhide": "2050-06-01"}

-         monitor = "monitor"

+         monitor = "no-monitoring"

+         upstreamurl = ""

          exception = False

          json = {

              "repo": repo,
@@ -1000,6 +1052,7 @@ 

              "action": action,

              "sls": sls,

              "monitor": monitor,

+             "upstreamurl": upstreamurl,

              "exception": exception,

          }

  
@@ -1020,7 +1073,10 @@ 

              comment=message,

          )

  

-     def test_process_new_repo_master_branch(self):

+     @patch(

+         "toddlers.plugins.scm_request_processor.SCMRequestProcessor.validate_review_bug"

+     )

+     def test_process_new_repo_master_branch(self, mock_validate_review_bug):

          """

          Assert that ticket will be closed when branch is set to master branch.

          Master branch is no longer allowed.
@@ -1033,7 +1089,8 @@ 

          bug_id = "123"

          action = "new_repo"

          sls = {"rawhide": "2050-06-01"}

-         monitor = "monitor"

+         monitor = "no-monitoring"

+         upstreamurl = ""

          exception = False

          json = {

              "repo": repo,
@@ -1043,6 +1100,7 @@ 

              "action": action,

              "sls": sls,

              "monitor": monitor,

+             "upstreamurl": upstreamurl,

              "exception": exception,

          }

          self.toddler.dist_git.get_project.return_value = None
@@ -1068,7 +1126,8 @@ 

          bug_id = "123"

          action = "new_repo"

          sls = {"rawhide": "2050-06-01"}

-         monitor = "monitor"

+         monitor = "no-monitoring"

+         upstreamurl = ""

          exception = False

          json = {

              "repo": repo,
@@ -1078,6 +1137,7 @@ 

              "action": action,

              "sls": sls,

              "monitor": monitor,

+             "upstreamurl": upstreamurl,

              "exception": exception,

          }

          self.toddler.process_new_repo(issue, json)
@@ -1120,11 +1180,12 @@ 

          }

  

          repo = "repo"

-         bug_id = ""

+         bug_id = "11"

          action = "new_repo"

          sls = {branch: "2050-06-01"}

-         monitor = "monitor"

-         exception = True

+         monitor = "no-monitoring"

+         upstreamurl = ""

+         exception = False

          json = {

              "repo": repo,

              "branch": branch,
@@ -1133,6 +1194,7 @@ 

              "action": action,

              "sls": sls,

              "monitor": monitor,

+             "upstreamurl": upstreamurl,

              "exception": exception,

          }

          dist_git_url = "https://src.fp.o"
@@ -1141,9 +1203,12 @@ 

          self.toddler.pagure_io.get_project_contributors.return_value = {

              "users": {"admin": [user], "commit": [], "ticket": []}

          }

+         self.toddler.validation_comment = "valid"

+         self.toddler.validate_review_bug = Mock()

  

          # Method to test

-         self.toddler.process_new_repo(issue, json)

+         with patch("toddlers.plugins.scm_request_processor.bugzilla_system"):

+             self.toddler.process_new_repo(issue, json)

  

          # asserts

          self.toddler.pagure_io.add_comment_to_issue.assert_called_with(
@@ -1176,7 +1241,8 @@ 

          bug_id = ""

          action = "new_repo"

          sls = {branch: "2050-06-01"}

-         monitor = "monitor"

+         monitor = "no-monitoring"

+         upstreamurl = ""

          exception = True

          json = {

              "repo": repo,
@@ -1186,6 +1252,7 @@ 

              "action": action,

              "sls": sls,

              "monitor": monitor,

+             "upstreamurl": upstreamurl,

              "exception": exception,

          }

          dist_git_url = "https://src.fp.o"
@@ -1203,9 +1270,10 @@ 

  

      @patch("toddlers.plugins.scm_request_processor.bugzilla_system")

      @patch(

-         "toddlers.plugins.scm_request_processor.SCMRequestProcessor.validate_review_bug"

+         "toddlers.plugins.scm_request_processor.SCMRequestProcessor._validate_new_repo_request",

+         return_value=True,

      )

-     def test_process_new_repo_project_exists(self, mock_validate_review_bug, mock_bz):

+     def test_process_new_repo_project_exists(self, mock_validate_request, mock_bz):

          """

          Assert that ticket will be processed correctly when repo already

          exists in dist git.
@@ -1218,7 +1286,7 @@ 

          bug_id = "123"

          action = "new_repo"

          sls = {"rawhide": "2050-06-01"}

-         monitor = "monitor"

+         monitor = "no-monitoring"

          exception = False

          json = {

              "repo": repo,
@@ -1229,6 +1297,7 @@ 

              "sls": sls,

              "monitor": monitor,

              "exception": exception,

+             "upstreamurl": "",

          }

  

          dist_git_url = "https://src.fp.o"
@@ -1270,7 +1339,7 @@ 

          bug_id = "123"

          action = "new_repo"

          sls = {branch: "2050-06-01"}

-         monitor = "monitor"

+         monitor = "no-monitoring"

          exception = False

          json = {

              "repo": repo,
@@ -1281,6 +1350,7 @@ 

              "sls": sls,

              "monitor": monitor,

              "exception": exception,

+             "upstreamurl": "",

          }

          self.toddler.branch_slas = {"rawhide": {"rawhide": "2050-06-01"}}

  
@@ -1322,6 +1392,314 @@ 

          )

          mock_bz.change_bug_status.assert_called_with(bug_id, "RELEASE_PENDING", message)

  

+     @patch(

+         "toddlers.plugins.scm_request_processor.SCMRequestProcessor._validate_new_repo_request",

+         return_value=True,

+     )

+     def test_process_new_repo_monitoring_project_created_successfully_package_exist(

+         self,

+         mock_validate_request,

+     ):

+         """

+         Assert that ticket will be processed with correct Monitoring message

+         when project and package exists.

+         """

+         # Preparation

+         user = "zlopez"

+         issue = {

+             "id": 100,

+             "user": {"name": user},

+         }

+ 

+         repo = "repo"

+         branch = "main"

+         namespace = "tests"

+         bug_id = ""

+         action = "new_repo"

+         sls = {branch: "2050-06-01"}

+         monitor = "monitoring"

+         upstreamurl = ""

+         backend = "custom"

+         distibution = "Fedora"

+         project_name = "test_project"

+         exception = False

+         json = {

+             "repo": repo,

+             "branch": branch,

+             "namespace": namespace,

+             "bug_id": bug_id,

+             "action": action,

+             "sls": sls,

+             "monitor": monitor,

+             "upstreamurl": upstreamurl,

+             "backend": backend,

+             "distribution": distibution,

+             "project_name": project_name,

+             "exception": exception,

+         }

+         dist_git_url = "https://src.fp.o"

+         self.toddler.dist_git._pagure_url = dist_git_url

+         self.toddler.dist_git.get_project.return_value = {"access_users": {"owner": []}}

+         anitya_project_url = "https://release-monitoring.org/project/123"

+         self.toddler.anitya.does_project_exists_in_anitya = Mock(

+             return_value=anitya_project_url

+         )

+         self.toddler.anitya.does_package_exists_in_anitya = Mock(return_value=True)

+         project_msg = (

+             "Anitya project is accessible by this link \n`{0}`\n "

+             "you can modify it manually.".format(anitya_project_url)

+         )

+ 

+         self.toddler.process_new_repo(issue, json)

+ 

+         self.toddler.dist_git.set_monitoring_status.assert_called_with(

+             namespace, repo, monitor

+         )

+         monitoring_msg = "\nMonitoring:\n{0}\n".format(project_msg)

+ 

+         message = "The Pagure repository was created at {0}/{1}/{2}{3}".format(

+             dist_git_url, namespace, repo, monitoring_msg

+         )

+ 

+         self.toddler.pagure_io.close_issue.assert_called_with(

+             100,

+             namespace=scm_request_processor.PROJECT_NAMESPACE,

+             message=message,

+             reason="Processed",

+         )

+ 

+     @patch(

+         "toddlers.plugins.scm_request_processor.SCMRequestProcessor._validate_new_repo_request",

+         return_value=True,

+     )

+     def test_process_new_repo_monitoring_project_was_not_created(

+         self,

+         mock_validate_request,

+     ):

+         """

+         Assert that ticket will be processed with correct Monitoring message

+         when project and package exists.

+         """

+         # Preparation

+         user = "zlopez"

+         issue = {

+             "id": 100,

+             "user": {"name": user},

+         }

+ 

+         repo = "repo"

+         branch = "main"

+         namespace = "tests"

+         bug_id = ""

+         action = "new_repo"

+         sls = {branch: "2050-06-01"}

+         monitor = "monitoring"

+         upstreamurl = ""

+         backend = "custom"

+         distibution = "Fedora"

+         project_name = "test_project"

+         exception = False

+         json = {

+             "repo": repo,

+             "branch": branch,

+             "namespace": namespace,

+             "bug_id": bug_id,

+             "action": action,

+             "sls": sls,

+             "monitor": monitor,

+             "upstreamurl": upstreamurl,

+             "backend": backend,

+             "distribution": distibution,

+             "project_name": project_name,

+             "exception": exception,

+         }

+         dist_git_url = "https://src.fp.o"

+         self.toddler.dist_git._pagure_url = dist_git_url

+         self.toddler.dist_git.get_project.return_value = {"access_users": {"owner": []}}

+         self.toddler.anitya.does_project_exists_in_anitya = Mock(return_value=None)

+         self.toddler.anitya.create_project_in_anitya = Mock(return_value=None)

+         project_msg = (

+             "Wasn't able to create project in Anitya. "

+             "You can create it manually on: `https://release-monitoring.org`"

+         )

+ 

+         self.toddler.process_new_repo(issue, json)

+ 

+         self.toddler.dist_git.set_monitoring_status.assert_called_with(

+             namespace, repo, monitor

+         )

+         monitoring_msg = "\nMonitoring:\n{0}\n".format(project_msg)

+ 

+         message = "The Pagure repository was created at {0}/{1}/{2}{3}".format(

+             dist_git_url, namespace, repo, monitoring_msg

+         )

+ 

+         self.toddler.pagure_io.close_issue.assert_called_with(

+             100,

+             namespace=scm_request_processor.PROJECT_NAMESPACE,

+             message=message,

+             reason="Processed",

+         )

+ 

+     @patch(

+         "toddlers.plugins.scm_request_processor.SCMRequestProcessor._validate_new_repo_request",

+         return_value=True,

+     )

+     def test_process_new_repo_monitoring_creating_package(

+         self,

+         mock_validate_request,

+     ):

+         """

+         Assert that ticket will be processed with correct Monitoring message

+         when project and package exists.

+         """

+         # Preparation

+         user = "zlopez"

+         issue = {

+             "id": 100,

+             "user": {"name": user},

+         }

+ 

+         repo = "repo"

+         branch = "main"

+         namespace = "tests"

+         bug_id = ""

+         action = "new_repo"

+         sls = {branch: "2050-06-01"}

+         monitor = "monitoring"

+         upstreamurl = ""

+         backend = "custom"

+         distibution = "Fedora"

+         project_name = "test_project"

+         exception = False

+         json = {

+             "repo": repo,

+             "branch": branch,

+             "namespace": namespace,

+             "bug_id": bug_id,

+             "action": action,

+             "sls": sls,

+             "monitor": monitor,

+             "upstreamurl": upstreamurl,

+             "backend": backend,

+             "distribution": distibution,

+             "project_name": project_name,

+             "exception": exception,

+         }

+         dist_git_url = "https://src.fp.o"

+         self.toddler.dist_git._pagure_url = dist_git_url

+         self.toddler.dist_git.get_project.return_value = {"access_users": {"owner": []}}

+         anitya_project_url = "https://release-monitoring.org/project/123"

+         self.toddler.anitya.does_project_exists_in_anitya = Mock(

+             return_value=anitya_project_url

+         )

+         self.toddler.anitya.does_package_exists_in_anitya = Mock(return_value=False)

+         self.toddler.anitya.create_package_in_anitya = Mock(return_value="Success")

+         project_msg = (

+             "Anitya project is accessible by this link \n`{0}`\n "

+             "you can modify it manually.".format(anitya_project_url)

+         )

+         package_msg = "Package was created in Anitya"

+ 

+         self.toddler.process_new_repo(issue, json)

+ 

+         self.toddler.dist_git.set_monitoring_status.assert_called_with(

+             namespace, repo, monitor

+         )

+         monitoring_msg = "\nMonitoring:\n{0}\n{1}".format(project_msg, package_msg)

+ 

+         message = "The Pagure repository was created at {0}/{1}/{2}{3}".format(

+             dist_git_url, namespace, repo, monitoring_msg

+         )

+ 

+         self.toddler.pagure_io.close_issue.assert_called_with(

+             100,

+             namespace=scm_request_processor.PROJECT_NAMESPACE,

+             message=message,

+             reason="Processed",

+         )

+ 

+     @patch(

+         "toddlers.plugins.scm_request_processor.SCMRequestProcessor._validate_new_repo_request",

+         return_value=True,

+     )

+     def test_process_new_repo_monitoring_creating_package_fails(

+         self,

+         mock_validate_request,

+     ):

+         """

+         Assert that ticket will be processed with correct Monitoring message

+         when project and package exists.

+         """

+         # Preparation

+         user = "zlopez"

+         issue = {

+             "id": 100,

+             "user": {"name": user},

+         }

+ 

+         repo = "repo"

+         branch = "main"

+         namespace = "tests"

+         bug_id = ""

+         action = "new_repo"

+         sls = {branch: "2050-06-01"}

+         monitor = "monitoring"

+         upstreamurl = ""

+         backend = "custom"

+         distibution = "Fedora"

+         project_name = "test_project"

+         exception = False

+         json = {

+             "repo": repo,

+             "branch": branch,

+             "namespace": namespace,

+             "bug_id": bug_id,

+             "action": action,

+             "sls": sls,

+             "monitor": monitor,

+             "upstreamurl": upstreamurl,

+             "backend": backend,

+             "distribution": distibution,

+             "project_name": project_name,

+             "exception": exception,

+         }

+         dist_git_url = "https://src.fp.o"

+         self.toddler.dist_git._pagure_url = dist_git_url

+         self.toddler.dist_git.get_project.return_value = {"access_users": {"owner": []}}

+         anitya_project_url = "https://release-monitoring.org/project/123"

+         self.toddler.anitya.does_project_exists_in_anitya = Mock(

+             return_value=anitya_project_url

+         )

+         self.toddler.anitya.does_package_exists_in_anitya = Mock(return_value=False)

+         response_msg = "Unauthorized, access token is incorrect."

+         self.toddler.anitya.create_package_in_anitya = Mock(return_value=response_msg)

+         project_msg = (

+             "Anitya project is accessible by this link \n`{0}`\n "

+             "you can modify it manually.".format(anitya_project_url)

+         )

+         package_msg = "Package wasn't created in Anitya, reason: `{0}`.".format(

+             response_msg

+         )

+ 

+         self.toddler.process_new_repo(issue, json)

+ 

+         self.toddler.dist_git.set_monitoring_status.assert_called_with(

+             namespace, repo, monitor

+         )

+         monitoring_msg = "\nMonitoring:\n{0}\n{1}".format(project_msg, package_msg)

+ 

+         message = "The Pagure repository was created at {0}/{1}/{2}{3}".format(

+             dist_git_url, namespace, repo, monitoring_msg

+         )

+ 

+         self.toddler.pagure_io.close_issue.assert_called_with(

+             100,

+             namespace=scm_request_processor.PROJECT_NAMESPACE,

+             message=message,

+             reason="Processed",

+         )

+ 

      @patch("toddlers.plugins.scm_request_processor.bugzilla_system")

      @patch(

          "toddlers.plugins.scm_request_processor.SCMRequestProcessor.validate_review_bug"
@@ -1341,7 +1719,8 @@ 

          bug_id = "123"

          action = "new_repo"

          sls = {branch: "2050-06-01"}

-         monitor = "monitor"

+         monitor = "no-monitoring"

+         upstreamurl = ""

          exception = False

          json = {

              "repo": repo,
@@ -1351,6 +1730,7 @@ 

              "action": action,

              "sls": sls,

              "monitor": monitor,

+             "upstreamurl": upstreamurl,

              "exception": exception,

          }

          self.toddler.branch_slas = {"rawhide": {"rawhide": "2050-06-01"}}
@@ -1411,7 +1791,8 @@ 

          bug_id = "123"

          action = "new_repo"

          sls = {branch: "2050-06-01"}

-         monitor = "monitor"

+         monitor = "no-monitoring"

+         upstreamurl = ""

          exception = False

          json = {

              "repo": repo,
@@ -1421,6 +1802,7 @@ 

              "action": action,

              "sls": sls,

              "monitor": monitor,

+             "upstreamurl": upstreamurl,

              "exception": exception,

          }

  
@@ -1473,7 +1855,7 @@ 

          bug_id = "123"

          action = "new_repo"

          sls = {branch: "2050-06-01"}

-         monitor = "monitor"

+         monitor = "no-monitoring"

          exception = False

          json = {

              "repo": repo,
@@ -1483,6 +1865,7 @@ 

              "action": action,

              "sls": sls,

              "monitor": monitor,

+             "upstreamurl": "",

              "exception": exception,

          }

  
@@ -1542,7 +1925,7 @@ 

          bug_id = "123"

          action = "new_repo"

          sls = {branch: "2050-06-01"}

-         monitor = "monitor"

+         monitor = "no-monitoring"

          exception = False

          json = {

              "repo": repo,
@@ -1552,6 +1935,7 @@ 

              "action": action,

              "sls": sls,

              "monitor": monitor,

+             "upstreamurl": "",

              "exception": exception,

          }

  
@@ -1627,7 +2011,7 @@ 

          bug_id = "123"

          action = "new_repo"

          sls = {branch: "2050-06-01"}

-         monitor = "monitor"

+         monitor = "no-monitoring"

          exception = False

          json = {

              "repo": repo,
@@ -1637,6 +2021,7 @@ 

              "action": action,

              "sls": sls,

              "monitor": monitor,

+             "upstreamurl": "",

              "exception": exception,

          }

  
@@ -1703,7 +2088,7 @@ 

          bug_id = "123"

          action = "new_repo"

          sls = {branch: "2050-06-01"}

-         monitor = "monitor"

+         monitor = "no-monitoring"

          exception = False

          json = {

              "repo": repo,
@@ -1713,6 +2098,7 @@ 

              "action": action,

              "sls": sls,

              "monitor": monitor,

+             "upstreamurl": "",

              "exception": exception,

          }

  

The added file is too large to be shown here, see it at: tests/plugins/test_unretire_packages.py
@@ -0,0 +1,410 @@ 

+ """

+ Unit tests for `toddlers.utils.anitya`.

+ """

+ 

+ from unittest.mock import Mock

+ 

+ import pytest

+ 

+ import toddlers.utils.anitya as anitya

+ 

+ 

+ class TestAnityaSetAnitya:

+     """

+     Test class for `toddlers.anitya.set_anitya` function.

+     """

+ 

+     def test_set_anitya(self):

+         """

+         Test initialization of anitya module.

+         """

+         config = {

+             "anitya_endpoint": "https://release-monitoring.org",

+             "anitya_access_token": "TOKEN",

+         }

+         anitya_obj = anitya.set_anitya(config)

+ 

+         assert anitya_obj._anitya_endpoint == config.get("anitya_endpoint")

+         assert anitya_obj._anitya_token == config.get("anitya_access_token")

+         assert anitya_obj._requests_session

+ 

+     def test_set_anitya_no_anitya_url(self):

+         """

+         Test initialization of anitya module without required config value.

+         """

+         with pytest.raises(

+             ValueError, match=r"No anitya endpoint found in config file"

+         ):

+             anitya.set_anitya({})

+ 

+     def test_set_anitya_no_anitya_api_key(self):

+         """

+         Test initialization of anitya module without required config value.

+         """

+         with pytest.raises(

+             ValueError, match=r"No anitya access token found in config file"

+         ):

+             config = {"anitya_endpoint": "https://anitya.io"}

+             anitya.set_anitya(config)

+ 

+ 

+ class TestAnityaDoesProjectExistInAnitya:

+     """

+     Test class for

+     `toddlers.anitya.Anitya.does_project_exists_in_anitya` method.

+     """

+ 

+     def setup_method(self):

+         """

+         Setup method for test class.

+         """

+         config = {

+             "anitya_endpoint": "https://release-monitoring.org",

+             "anitya_access_token": "TOKEN",

+         }

+         self.anitya_obj = anitya.set_anitya(config)

+         self.anitya_obj._requests_session = Mock()

+         self.anitya_obj.remove_trailing_slashes_from_url = Mock(

+             return_value="https://release-monitoring.org/api/v2/projects/"

+         )

+ 

+     def test_does_project_exists_in_anitya(self):

+         """

+         Assert that method will return correct response about project exists in anitya.

+         """

+         endpoint = "https://release-monitoring.org/api/v2/projects/"

+         project_name = "amedvede_project"

+         mock_response = Mock()

+         mock_response.status_code = 200

+         mock_response.json.return_value = {

+             "items": [

+                 {

+                     "id": 123,

+                     "name": project_name,

+                 }

+             ],

+             "total_items": 1,

+         }

+         params = {"name": project_name}

+         self.anitya_obj._requests_session.get.return_value = mock_response

+ 

+         result = self.anitya_obj.does_project_exists_in_anitya(project_name)

+ 

+         assert result == "https://release-monitoring.org/project/123"

+         self.anitya_obj._requests_session.get.assert_called_once_with(

+             endpoint, params=params

+         )

+ 

+     def test_does_project_exists_in_anitya_project_not_found(self):

+         """

+         Assert that method will return correct response about project does not exist in anitya.

+         """

+         endpoint = "https://release-monitoring.org/api/v2/projects/"

+         project_name = "amedvede_project"

+         mock_response = Mock()

+         mock_response.status_code = 404

+         params = {"name": project_name}

+         self.anitya_obj._requests_session.get.return_value = mock_response

+ 

+         result = self.anitya_obj.does_project_exists_in_anitya(project_name)

+ 

+         assert result is None

+         self.anitya_obj._requests_session.get.assert_called_once_with(

+             endpoint, params=params

+         )

+ 

+     def test_does_project_exists_in_anitya_empty_items(self):

+         """Assert that method will return correct response about project not found in anitya."""

+         endpoint = "https://release-monitoring.org/api/v2/projects/"

+         project_name = "amedvede_project"

+         mock_response = Mock()

+         mock_response.status_code = 200

+         mock_response.json.return_value = {"items": [], "total_items": 0}

+         params = {"name": project_name}

+         self.anitya_obj._requests_session.get.return_value = mock_response

+ 

+         result = self.anitya_obj.does_project_exists_in_anitya(project_name)

+ 

+         assert result is None

+         self.anitya_obj._requests_session.get.assert_called_once_with(

+             endpoint, params=params

+         )

+ 

+     def test_does_project_exists_in_anitya_wrong_structure(self):

+         """

+         Assert that method will return correct response about project has wrong structure in anitya.

+         """

+         endpoint = "https://release-monitoring.org/api/v2/projects/"

+         project_name = "amedvede_project"

+         mock_response = Mock()

+         mock_response.status_code = 200

+         mock_response.json.return_value = {

+             "items": [

+                 {

+                     "wrong": "structure",

+                 }

+             ],

+             "total_items": 1,

+         }

+         params = {"name": project_name}

+         self.anitya_obj._requests_session.get.return_value = mock_response

+ 

+         result = self.anitya_obj.does_project_exists_in_anitya(project_name)

+ 

+         assert result is None

+         self.anitya_obj._requests_session.get.assert_called_once_with(

+             endpoint, params=params

+         )

+ 

+ 

+ class TestAnityaDoesPackageExistInAnitya:

+     """

+     Test class for `toddlers.anitya.Anitya.does_package_exists_in_anitya` method.

+     """

+ 

+     def setup_method(self):

+         """

+         Setup method for test class.

+         """

+         config = {

+             "anitya_endpoint": "https://release-monitoring.org",

+             "anitya_access_token": "TOKEN",

+         }

+         self.anitya_obj = anitya.set_anitya(config)

+         self.anitya_obj._requests_session = Mock()

+         self.anitya_obj.remove_trailing_slashes_from_url = Mock(

+             return_value="https://release-monitoring.org/api/v2/packages/"

+         )

+ 

+     @pytest.mark.parametrize(

+         "project_name, expected_project_name, expected_result",

+         [

+             ("nice_project", "nice_project", True),

+             ("nice_project", "bad_project", False),

+         ],

+     )

+     def test_does_package_exists_in_anitya(

+         self, project_name, expected_project_name, expected_result

+     ):

+         """

+         Assert that method will return correct response about package exists in anitya

+         and his project name is same with expected.

+         """

+         endpoint = "https://release-monitoring.org/api/v2/packages/"

+         package_name = "amedvede_package"

+         distribution = "Fedora"

+         mock_response = Mock()

+         mock_response.status_code = 200

+         mock_response.json.return_value = {

+             "items": [

+                 {

+                     "name": package_name,

+                     "project": project_name,

+                 }

+             ],

+             "total_items": 1,

+         }

+         params = {

+             "name": package_name,

+             "distribution": distribution,

+         }

+         self.anitya_obj._requests_session.get.return_value = mock_response

+ 

+         result = self.anitya_obj.does_package_exists_in_anitya(

+             package_name, distribution, expected_project_name

+         )

+ 

+         assert result is expected_result  # package and project name the same

+         self.anitya_obj._requests_session.get.assert_called_once_with(

+             endpoint, params=params

+         )

+ 

+     def test_does_package_exists_in_anitya_not_found(self):

+         """

+         Assert that method will return correct response when package does not exist in anitya.

+         """

+         endpoint = "https://release-monitoring.org/api/v2/packages/"

+         package_name = "amedvede_package"

+         project_name = "different name"

+         distribution = "Fedora"

+         mock_response = Mock()

+         mock_response.status_code = 202

+         params = {

+             "name": package_name,

+             "distribution": distribution,

+         }

+         self.anitya_obj._requests_session.get.return_value = mock_response

+ 

+         result = self.anitya_obj.does_package_exists_in_anitya(

+             package_name, distribution, project_name

+         )

+ 

+         assert result is False  # package and project name are different

+         self.anitya_obj._requests_session.get.assert_called_once_with(

+             endpoint, params=params

+         )

+ 

+     def test_does_package_exists_in_anitya_found_zero_items(self):

+         """

+         Assert that method will return correct response when response code is correct,

+         but response does not contain items.

+         """

+         endpoint = "https://release-monitoring.org/api/v2/packages/"

+         package_name = "amedvede_package"

+         project_name = "different name"

+         distribution = "Fedora"

+         mock_response = Mock()

+         mock_response.status_code = 200

+         mock_response.json.return_value = {"items": [], "total_items": 0}

+         params = {

+             "name": package_name,

+             "distribution": distribution,

+         }

+         self.anitya_obj._requests_session.get.return_value = mock_response

+ 

+         result = self.anitya_obj.does_package_exists_in_anitya(

+             package_name, distribution, project_name

+         )

+ 

+         assert result is False  # package and project name are different

+         self.anitya_obj._requests_session.get.assert_called_once_with(

+             endpoint, params=params

+         )

+ 

+ 

+ class TestAnityaCreateProjectInAnitya:

+     """

+     Test class for `toddlers.anitya.Anitya.create_project_in_anitya` method.

+     """

+ 

+     def setup_method(self):

+         """

+         Setup method for test class.

+         """

+         config = {

+             "anitya_endpoint": "https://release-monitoring.org",

+             "anitya_access_token": "TOKEN",

+         }

+         self.anitya_obj = anitya.set_anitya(config)

+         self.anitya_obj._requests_session = Mock()

+         self.anitya_obj.remove_trailing_slashes_from_url = Mock(

+             return_value="https://release-monitoring.org/api/v2/projects/"

+         )

+ 

+     def test_create_project_in_anitya_successful_creation(self):

+         """

+         Assert that method will return correct response when project is created.

+         """

+         endpoint = "https://release-monitoring.org/api/v2/projects/"

+         project_name = "project"

+         homepage = "https://project.com"

+         backend = "GitHub"

+         test_data = {

+             "name": project_name,

+             "homepage": homepage,

+             "backend": backend,

+         }

+         response_json = {"id": 123}

+         mock_response = Mock()

+         mock_response.status_code = 201

+         mock_response.json.return_value = response_json

+         self.anitya_obj._requests_session.post.return_value = mock_response

+ 

+         result = self.anitya_obj.create_project_in_anitya(

+             project_name, homepage, backend

+         )

+ 

+         assert result == "https://release-monitoring.org/project/123"

+         self.anitya_obj._requests_session.post.assert_called_once_with(

+             url=endpoint,

+             data=test_data,

+             headers={"Authorization": "token TOKEN"},

+         )

+ 

+     def test_create_project_in_anitya_fail(self):

+         """

+         Assert that method will return correct response when project is not created.

+         """

+         endpoint = "https://release-monitoring.org/api/v2/projects/"

+         project_name = "project"

+         homepage = "https://project.com"

+         backend = "GitHub"

+         test_data = {

+             "name": project_name,

+             "homepage": homepage,

+             "backend": backend,

+         }

+         mock_response = Mock()

+         mock_response.status_code = 400

+         self.anitya_obj._requests_session.post.return_value = mock_response

+ 

+         result = self.anitya_obj.create_project_in_anitya(

+             project_name, homepage, backend

+         )

+ 

+         assert result is None

+         self.anitya_obj._requests_session.post.assert_called_once_with(

+             url=endpoint,

+             data=test_data,

+             headers={"Authorization": "token TOKEN"},

+         )

+ 

+ 

+ class TestAnityaCreatePackageInAnitya:

+     """

+     Test class for `toddlers.anitya.Anitya.create_package_in_anitya` method.

+     """

+ 

+     def setup_method(self):

+         """

+         Setup method for test class.

+         """

+         config = {

+             "anitya_endpoint": "https://release-monitoring.org",

+             "anitya_access_token": "TOKEN",

+         }

+         self.anitya_obj = anitya.set_anitya(config)

+         self.anitya_obj._requests_session = Mock()

+         self.anitya_obj.remove_trailing_slashes_from_url = Mock(

+             return_value="https://release-monitoring.org/api/v2/packages/"

+         )

+ 

+     @pytest.mark.parametrize(

+         "response_code, expected_result",

+         [

+             (201, "Success"),

+             (400, "Bad Request, some necessary arguments were not provided."),

+             (401, "Unauthorized, access token is incorrect."),

+             (409, "Conflict, package already exists."),

+             (404, None),

+         ],

+     )

+     def test_create_package_in_anitya(self, response_code, expected_result):

+         """

+         Assert that method will return correct response when package is created.

+         """

+         endpoint = "https://release-monitoring.org/api/v2/packages/"

+         package_name = "test_package"

+         project_name = "test_project"

+         distribution = "Fedora"

+         project_ecosystem = "https://project.com"

+         test_data = {

+             "package_name": package_name,

+             "project_name": project_name,

+             "distribution": distribution,

+             "project_ecosystem": project_ecosystem,

+         }

+         mock_response = Mock()

+         mock_response.status_code = response_code

+         self.anitya_obj._requests_session.post.return_value = mock_response

+ 

+         result = self.anitya_obj.create_package_in_anitya(

+             package_name, project_name, distribution, project_ecosystem

+         )

+ 

+         assert result == expected_result

+         self.anitya_obj._requests_session.post.assert_called_once_with(

+             url=endpoint,

+             data=test_data,

+             headers={"Authorization": "token TOKEN"},

+         )

file modified
+14 -10
@@ -2,7 +2,7 @@ 

  Unit tests for `toddlers.utils.git`.

  """

  

- from unittest.mock import call, MagicMock, Mock, patch

+ from unittest.mock import MagicMock, Mock, patch

  

  import pytest

  
@@ -290,24 +290,28 @@ 

          Assert that revert last commit process correctly.

          """

          mock_origin = MagicMock()

+         mock_origin.url = "https://example.com"

          self.repo.repo.remote.return_value = mock_origin

+         mock_git_cmd = MagicMock()

+         self.repo.repo.git = mock_git_cmd

  

-         self.repo.revert_last_commit("Revert message", "feature_branch")

+         self.repo.revert_last_commit("Revert message", "bot", "token", "feature_branch")

          self.repo.repo.git.checkout.assert_called_once_with("feature_branch")

-         self.repo.repo.git.execute.assert_has_calls(

-             [

-                 call(["git", "revert", "--no-edit", "HEAD"]),

-                 call(["git", "commit", "--amend", "-m", "Revert message"]),

-             ]

+         self.repo.repo.git.revert.assert_called_once_with("HEAD", no_edit=True)

+         self.repo.repo.git.commit.assert_called_once_with(

+             "--amend", "-m", "Revert message"

+         )

+ 

+         mock_git_cmd.push.assert_called_once_with(

+             "-u", "https://bot:token@example.com", "feature_branch"

          )

-         mock_origin.push.assert_called_once()

  

      def test_revert_last_commit_revert_exception(self):

          mock_origin = MagicMock()

          self.repo.repo.remote.return_value = mock_origin

-         self.repo.repo.git.execute.side_effect = Exception("Revert error")

+         self.repo.repo.git.revert.side_effect = Exception("Revert error")

  

-         self.repo.revert_last_commit("Revert message", "feature_branch")

+         self.repo.revert_last_commit("Revert message", "bot", "token", "feature_branch")

          mock_origin.push.assert_not_called()

  

  

file modified
+7
@@ -253,6 +253,9 @@ 

  ping_comment = "This request wants to skip bugzilla validation! {maintainers} could you check if this is correct? If yes, please respond to this ticket with 'valid' comment"

  # This is a OIDC token that allows pagure_user to push changes to dist git

  oidc_distgit_token = "OIDC token used to push git changes using pagure_user"

+ # Anitya access token and endpoint for managing project in release-monitoring

+ anitya_access_token = "API token for Anitya"

+ anitya_endpoint = "https://release-monitoring.org"

  

  

  # Pagure mapping to bugzilla
@@ -321,6 +324,10 @@ 

  

  [consumer_config.cleaning_packager_groups]

  watched_groups = ["packager"]

+ ignored_groups = ["sysadmin-main"]

+ notify_emails = [

+     "root@localhost.localdomain",

+ ]

  

  [qos]

  prefetch_size = 0

@@ -12,7 +12,7 @@ 

  

  from toddlers.base import ToddlerBase

  from toddlers.exceptions import PagureError

- from toddlers.utils import pagure

+ from toddlers.utils import notify, pagure

  

  _log = logging.getLogger(__name__)

  
@@ -36,6 +36,8 @@ 

          super().__init__()

          self.pagure_url = None

          self.dist_git = None

+         # Track users who can't be removed because they're group creators

+         self.creator_removal_failures = {}

          # Initialize IPA session

          conf_vars = ("KRB5_CONFIG", "IPA_CONFDIR")

          for conf_var in conf_vars:
@@ -75,6 +77,12 @@ 

              _log.info("No distgit groups found, bailing.")

              return

  

+         # Filter out ignored groups

+         ignored_groups = config.get("ignored_groups", [])

+         if ignored_groups:

+             distgit_groups = [g for g in distgit_groups if g not in ignored_groups]

+             _log.info(f"Filtered out ignored groups: {ignored_groups}")

+ 

          for user in message.user_names:

              self._process_removal(config, user, group, distgit_groups)

  
@@ -84,6 +92,9 @@ 

          """

          _log.info("Starting find_and_remove process")

  

+         # Reset the creator removal failures tracking

+         self.creator_removal_failures = {}

+ 

          # Get all distgit groups

          try:

              distgit_groups = self.dist_git.get_all_groups()
@@ -110,8 +121,15 @@ 

              f"Found {len(packager_members)} unique packagers (members and sponsors) "

              "in IPA packager groups"

          )

+ 

+         ignored_groups = config.get("ignored_groups", [])

+ 

          # Process each distgit group

          for group in distgit_groups:

+             # Skip ignored groups

+             if group in ignored_groups:

+                 _log.info(f"Skipping ignored group {group}")

+                 continue

              group_members = self.dist_git.get_group_members(group)

              if not group_members:

                  _log.exception(f"No group members found in distgit group {group}")
@@ -137,6 +155,31 @@ 

  

          _log.info("Completed find_and_remove process")

  

+         if self.creator_removal_failures:

+             _log.warning(

+                 "The following users could not be removed automatically "

+                 "because they are group creators and must be removed manually:"

+             )

+             _log.warning(f"Manual removal required: {self.creator_removal_failures}")

+ 

+             notify_emails = config.get("notify_emails")

+             if notify_emails:

+                 _log.info(

+                     "Sending email notification about users requiring manual removal"

+                 )

+                 email_content = self._format_creator_removal_email()

+                 try:

+                     notify.send_email(

+                         to_addresses=notify_emails,

+                         from_address=config.get("admin_email"),

+                         subject="Toddlers: Manual removal required for group creators",

+                         content=email_content,

+                         mail_server=config.get("mail_server"),

+                     )

+                     _log.info("Email notification sent successfully")

+                 except Exception as e:

+                     _log.error(f"Failed to send email notification: {e}")

+ 

      def _remove_from_distgit_and_ipa(self, user, group):

          try:

              self._remove_from_ipa(user, group)
@@ -151,10 +194,32 @@ 

              # pagure token with acls=group_modify needed

              self.dist_git.remove_member_from_group(user, group)

              _log.info(f"User {user} removed from distgit group: {group}")

-         except PagureError:

-             _log.exception(

-                 f"Error while removing user {user} from distgit group {group}"

-             )

+         except PagureError as e:

+             # Check if this is a "group creator cannot be removed" error

+             error_str = str(e)

+             if "creator of a group cannot be removed" in error_str.lower():

+                 _log.warning(

+                     f"Cannot remove user {user} from distgit group {group} - "

+                     f"user is the group creator"

+                 )

+                 self.creator_removal_failures[group] = user

+             else:

+                 _log.exception(

+                     f"Error while removing user {user} from distgit group {group}"

+                 )

+ 

+     def _format_creator_removal_email(self):

+         """Format email content for notifying admins about users requiring manual removal."""

+         content = (

+             "The following users could not be removed automatically "

+             "because they are group creators:\n\n"

+         )

+ 

+         for group, user in self.creator_removal_failures.items():

+             content += f"  {group}: {user}\n"

+ 

+         content += "\nPlease remove these users manually or transfer group ownership.\n"

+         return content

  

      def _check_ipa_removal_output(self, output, user, group, role):

          """Check IPA removal command output and raise errors if unsuccessful."""
@@ -231,6 +296,12 @@ 

              "packager-related groups as well."

          )

  

+         # Check if triggering group is in ignored list

+         ignored_groups = config.get("ignored_groups", [])

+         if group in ignored_groups:

+             _log.info(f"Skipping removal from ignored group {group}")

+             return

+ 

          # Remove user from the distgit group that triggered the toddler

          self._remove_from_distgit(user, group)

  

@@ -70,13 +70,17 @@ 

  

      def adjust_releases_for_lookaside(self, releases):

          releases = list(set(releases))  # Let's remove duplicates

-         if any(re.match(r"^f\d{2,}$", regex) for regex in releases):

-             if "rawhide" in releases:

-                 releases.remove("rawhide")

-             # Remove the highest value, which should be the rawhide

-             releases.remove(max(releases))

-             # And add rawhide instead

+ 

+         # Find fedora branches and identify the highest one

+         # which corresponds to rawhide in the lookaside cache

+         fedora_branches = [r for r in releases if re.match(r"^f\d+$", r)]

+         if fedora_branches and "rawhide" not in releases:

+             # Only replace highest Fedora with "rawhide" if rawhide is not

+             # already present.

+             highest_fedora = max(fedora_branches, key=lambda x: int(x[1:]))

+             releases.remove(highest_fedora)

              releases.append("rawhide")

+ 

          lookaside_release = []

          for release in releases:

              if "eln" in release or "-next" in release:
@@ -110,20 +114,24 @@ 

                  return

              packages = response.json()[active_release]

  

+             # Determine koji tag once per release.

+             if active_release == "rawhide":

+                 koji_tag = self.get_tag_from_target("rawhide")

+             elif "epel" in active_release:

+                 koji_tag = self.get_tag_from_target(active_release)

+             else:

+                 koji_tag = active_release

+ 

              for package in packages:

-                 if active_release == "rawhide":

-                     active_release = self.get_tag_from_target("rawhide")

-                 elif "epel" in active_release:

-                     active_release = self.get_tag_from_target(active_release)

                  _log.info(

                      f"Checking if package {package} is blocked "

-                     f"in koji for release {active_release}"

+                     f"in koji for release {koji_tag}"

                  )

                  try:

                      (resp,) = self.execute_koji_call(

                          "listPackages",

                          pkgID=package,

-                         tagID=active_release,

+                         tagID=koji_tag,

                      )

                  except TypeError:

                      _log.error("Not able to get data from koji, bailing.")
@@ -149,19 +157,19 @@ 

                              f"Package {package} is not retired on the distgit branch "

                              f"{active_release}, was probably unretired recently, bailing."

                          )

-                         return

+                         continue

  

-                     _log.info(f"Blocking package {package}, tag: {active_release}")

-                     needs_blocking[active_release].append(package)

+                     _log.info(f"Blocking package {package}, tag: {koji_tag}")

+                     needs_blocking[koji_tag].append(package)

                      self.execute_koji_call(

                          "packageListBlock",

-                         taginfo=active_release,

+                         taginfo=koji_tag,

                          pkginfo=package,

                      )

                  else:

                      _log.info(

                          f"Package {package} is already blocked in koji "

-                         f"for tag {active_release}"

+                         f"for tag {koji_tag}"

                      )

          _log.info(

              f"All packages that should be blocked in this run: {dict(needs_blocking)}"

@@ -24,7 +24,14 @@ 

  

  from toddlers.base import ToddlerBase

  from toddlers.exceptions import ValidationError

- from toddlers.utils import bugzilla_system, fedora_account, git, pagure, requests

+ from toddlers.utils import (

+     anitya,

+     bugzilla_system,

+     fedora_account,

+     git,

+     pagure,

+     requests,

+ )

  

  # Regex for branch name validation

  STREAM_NAME_REGEX = r"^[a-zA-Z0-9.\-_+]+$"
@@ -100,6 +107,9 @@ 

      # for toddler

      pagure_user: str = ""

  

+     # Anitya object to work with Anitya

+     anitya: anitya.Anitya

+ 

      def accepts_topic(self, topic: str) -> bool:

          """Returns a boolean whether this toddler is interested in messages

          from this specific topic.
@@ -187,6 +197,9 @@ 

          _log.info("Setting up connection to Bugzilla")

          bugzilla_system.set_bz(config)

  

+         _log.info("Setting up connection to Anitya")

+         self.anitya = anitya.set_anitya(config)

+ 

          try:

              if message.topic.endswith("pagure.issue.comment.added"):

                  self.process_comment(issue)
@@ -256,7 +269,11 @@ 

                  )

                  issue_body_json = json.loads(issue["content"].strip("`").strip("\n"))

                  if issue_body_json.get("action") == "new_repo":

-                     self._create_new_repo(issue, issue_body_json)

+                     self._create_new_repo(

+                         issue,

+                         issue_body_json,

+                         initial_commit=issue_body_json.get("initial_commit", True),

+                     )

                  else:

                      self.pagure_io.add_comment_to_issue(

                          issue["id"],
@@ -450,6 +467,12 @@ 

              "namespace",

              "sls",

              "monitor",

+             "upstreamurl",

+         ]

+         required_keys_for_monitor = [

+             "backend",

+             "project_name",

+             "distribution",

          ]

          for key in required_keys:

              if key not in issue_body_json.keys():
@@ -461,6 +484,18 @@ 

                  )

                  return

  

+         monitor = issue_body_json.get("monitor", "").strip()

+         if monitor != "no-monitoring":

+             for key in required_keys_for_monitor:

+                 if key not in issue_body_json.keys():

+                     self.pagure_io.close_issue(

+                         issue["id"],

+                         namespace=PROJECT_NAMESPACE,

+                         message="Invalid body, missing required field: {}".format(key),

+                         reason="Invalid",

+                     )

+                     return

+ 

          # Validate the request first

          if self._validate_new_repo_request(issue, issue_body_json):

              _log.info("Ticket passed all validations. Creating repository.")
@@ -636,6 +671,7 @@ 

          branch_name = issue_body_json.get("branch", "").strip()

          description = issue_body_json.get("description", "").strip()

          upstreamurl = issue_body_json.get("upstreamurl", "").strip()

+         monitor = issue_body_json.get("monitor", "").strip()

  

          if namespace in ["rpms", "container"]:

              default_branch = "rawhide"
@@ -736,6 +772,50 @@ 

                  'You may commit to the branch "{1}" in about '

                  "10 minutes.".format(dist_git_url, branch_name)

              )

+ 

+         if monitor != "no-monitoring":

+             _log.info("- Checking if project {0} exists in Anitya".format(repo))

+             backend = issue_body_json["backend"].strip()

+             distribution = issue_body_json["distribution"].strip()

+             project_name = issue_body_json["project_name"].strip()

+ 

+             monitoring_message = ""

+             project_msg = ""

+             package_msg = ""

+             anitya_project_url = self.anitya.does_project_exists_in_anitya(project_name)

+             if anitya_project_url is None:

+                 anitya_project_url = self.anitya.create_project_in_anitya(

+                     repo, upstreamurl, backend

+                 )

+             if anitya_project_url is None:

+                 project_msg = (

+                     "Wasn't able to create project in Anitya. "

+                     "You can create it manually on: `https://release-monitoring.org`"

+                 )

+             else:

+                 project_msg = (

+                     "Anitya project is accessible by this link \n`{0}`\n "

+                     "you can modify it manually."

+                 ).format(anitya_project_url)

+                 package_exists = self.anitya.does_package_exists_in_anitya(

+                     repo, project_name, distribution

+                 )

+                 if not package_exists:

+                     response_msg = self.anitya.create_package_in_anitya(

+                         repo, project_name, distribution, upstreamurl

+                     )

+                     if response_msg != "Success":

+                         package_msg = (

+                             "Package wasn't created in Anitya, reason: `{0}`.".format(

+                                 response_msg

+                             )

+                         )

+                     else:

+                         package_msg = "Package was created in Anitya"

+ 

+             monitoring_message = project_msg + "\n" + package_msg

+             new_repo_comment = new_repo_comment + "\nMonitoring:\n" + monitoring_message

+ 

          self.pagure_io.close_issue(

              issue["id"],

              namespace=PROJECT_NAMESPACE,

@@ -0,0 +1,581 @@ 

+ """

+ This is a script to automate package unretirement when a ticket is created.

+ 

+ Authors:    Anton Medvedev <amedvede@redhat.com>

+ 

+ """

+ 

+ import argparse

+ import json

+ import logging

+ import re

+ import sys

+ import tempfile

+ import traceback

+ from typing import Optional

+ 

+ import arrow

+ from fedora_messaging.api import Message

+ from git import GitCommandError

+ import koji

+ from pagure_messages.issue_schema import IssueNewV1

+ import tomllib

+ 

+ from toddlers.base import ToddlerBase

+ from toddlers.exceptions import ValidationError

+ from toddlers.utils import bodhi, bugzilla_system, git, pagure, requests

+ 

+ 

+ # Where to look for unretire request tickets

+ PROJECT_NAMESPACE = "releng/fedora-scm-requests"

+ # Keyword that will be searched for in the issue title

+ UNRETIRE_KEYWORD = "unretire"

+ # RPM package prefix that will be searched for in the issue title

+ RPM_PREFIX = "rpms/"

+ # Forbidden keywords for commit message

+ FORBIDDEN_KEYWORDS_FOR_COMMIT_MESSAGE = ["legal", "license"]

+ # Time limit (in days) since retirement after which a Bugzilla review URL is required

+ TIME_DIFFERENCE_LIMIT = 56  # 8 weeks in days

+ # Package retirement process url

+ PACKAGE_RETIREMENT_PROCESS_URL = (

+     "https://docs.fedoraproject.org/en-US/package-maintainers"

+     "/Package_Retirement_Process/#claiming"

+ )

+ # Fedora review bugzilla flag

+ FEDORA_REVIEW_FLAG_NAME = "fedora-review"

+ # Koji hub url

+ KOJIHUB_URL = "https://koji.fedoraproject.org/kojihub"

+ 

+ _log = logging.getLogger(__name__)

+ 

+ 

+ class UnretirePackages(ToddlerBase):

+     """

+     Listen for new tickets in https://pagure.io/releng/fedora-scm-requests/issues

+     and process then, either by unretiring a package or rejecting the ticket

+     """

+ 

+     name: str = "unretire_packages"

+ 

+     amqp_topics: list = ["io.pagure.*.pagure.issue.new"]

+ 

+     # Path to temporary dir

+     temp_dir: str = ""

+ 

+     # Requests session

+     requests_session: requests.requests.Session

+ 

+     # Dist-git base url

+     dist_git_base: Optional[str] = ""

+ 

+     # Pagure object connected to pagure.io

+     pagure_io: pagure.Pagure

+ 

+     # Pagure user that will be creating the comments on pagure

+     # for toddler

+     pagure_user: str = ""

+ 

+     # Git repo object

+     git_repo: git.GitRepo

+ 

+     # Koji session object

+     koji_session: koji.ClientSession

+ 

+     # Bodhi object

+     bodhi: bodhi.Bodhi

+ 

+     # OIDC distgit token

+     oidc_distgit_token: str

+ 

+     def accepts_topic(self, topic: str) -> bool:

+         """

+         Returns a boolean whether this toddler is interested in messages

+         from this specific topic.

+ 

+         :arg topic: Topic to check

+ 

+         :returns: True if topic is accepted, False otherwise

+         """

+         if topic.startswith("io.pagure."):

+             if topic.endswith("pagure.issue.new"):

+                 return True

+ 

+         return False

+ 

+     def process(

+         self,

+         config: dict,

+         message: Message,

+     ) -> None:

+         """

+         Process a given message.

+ 

+         :arg config: Toddlers configuration

+         :arg message: Message to process

+         """

+         _log.debug(

+             "Processing message:\n{0}".format(json.dumps(message.body, indent=2))

+         )

+         project_name = message.body["project"]["fullname"]

+ 

+         if project_name != PROJECT_NAMESPACE:

+             _log.info(

+                 "The message doesn't belong to project {0}. Skipping message.".format(

+                     PROJECT_NAMESPACE

+                 )

+             )

+             return

+ 

+         issue = message.body["issue"]

+ 

+         if issue["status"] != "Open":

+             _log.info(

+                 "The issue {0} is not open. Skipping message.".format(issue["id"])

+             )

+             return

+ 

+         issue_title = issue["title"]

+         words_in_issue_title = issue_title.split()

+         if UNRETIRE_KEYWORD != words_in_issue_title[0].lower():

+             _log.info(

+                 "The issue doesn't contain keyword '{0}' in the title '{1}'"

+                 "".format(UNRETIRE_KEYWORD, issue_title)

+             )

+             return

+ 

+         _log.debug("Getting temp_folder name from config.")

+         self.temp_dir = config.get("temp_folder", "")

+ 

+         _log.debug("Creating a request session.")

+         self.requests_session = requests.make_session()

+ 

+         _log.debug("Getting dist-git url from config.")

+         self.dist_git_base = config.get("dist_git_url")

+ 

+         _log.debug("Setting up connection to Pagure")

+         self.pagure_io = pagure.set_pagure(config)

+         self.pagure_user = config.get("pagure_user", "")

+ 

+         _log.debug("Setting up connection to Bugzilla")

+         bugzilla_system.set_bz(config)

+ 

+         _log.debug("Setting up session with Koji")

+         self.koji_session = koji.ClientSession(KOJIHUB_URL)

+ 

+         _log.debug("Setting up bodhi session")

+         self.bodhi = bodhi.set_bodhi(config)

+ 

+         _log.debug("Getting OIDC distgit token from config.")

+         self.oidc_distgit_token = config.get("oidc_distgit_token", "")

+ 

+         try:

+             self.process_ticket(issue)

+         except BaseException:

+             self.pagure_io.add_comment_to_issue(

+                 issue["id"],

+                 namespace=PROJECT_NAMESPACE,

+                 comment=(

+                     "Error happened during processing:\n" "```\n" "{0}\n" "```\n"

+                 ).format(traceback.format_exc()),

+             )

+ 

+     def process_ticket(self, issue: dict) -> None:

+         """

+         Process a single ticket

+ 

+         :arg issue: A dictionary containing the issue

+         """

+         _log.info("Handling pagure releng ticket '{0}'".format(issue["full_url"]))

+         try:

+             # If a ValueError is raised, that means it isn't valid JSON

+             issue_body = json.loads(issue["content"].strip("`").strip("\n"))

+         except ValueError:

+             _log.info("Invalid JSON in ticket. Closing '{0}'".format(issue["full_url"]))

+             self.pagure_io.close_issue(

+                 issue["id"],

+                 namespace=PROJECT_NAMESPACE,

+                 message="Invalid JSON provided",

+                 reason="Invalid",

+             )

+             return

+ 

+         package_name = issue_body["name"]

+         package_ns = issue_body["type"]

+         maintainer_fas = issue_body["maintainer"]

+ 

+         package_ns = self._ns_convertor(package_ns)

+ 

+         package_url = "{0}/{1}/{2}.git".format(

+             self.dist_git_base, package_ns, package_name

+         )

+ 

+         _log.debug("Verifying that package repository actually exist.")

+         if not self._does_url_exist(package_url):

+             msg = "Package repository doesnt exist. Try to repeat request."

+             _log.info(msg)

+             self.pagure_io.close_issue(

+                 issue["id"],

+                 namespace=PROJECT_NAMESPACE,

+                 message=msg,

+                 reason="Invalid",

+             )

+             return

+ 

+         _log.debug("Creating temporary directory")

+         with tempfile.TemporaryDirectory(dir=self.temp_dir) as tmp_dir:

+             _log.info("Cloning repo into dir with name '{0}'".format(self.temp_dir))

+             try:

+                 self.git_repo = git.clone_repo(package_url, tmp_dir)

+             except GitCommandError:

+                 message = "Something went wrong during cloning git repository."

+                 _log.info(message)

+                 self.pagure_io.close_issue(

+                     issue["id"],

+                     namespace=PROJECT_NAMESPACE,

+                     message=message,

+                     reason="Invalid",

+                 )

+                 return

+ 

+             branches = issue_body["branches"]

+ 

+             _log.debug("Getting active branches")

+             active_branches = self.bodhi.get_active_branches()

+ 

+             filtered_branches = [

+                 branch for branch in branches if branch in active_branches

+             ]

+ 

+             final_list_of_branches = []

+             deadpackage_file_path = "dead.package"

+             _log.debug("Verifying that branches are actually exists.")

+             _log.debug(

+                 "Verifying that branches are actually retired (have a `dead.package` file)."

+             )

+             for branch in filtered_branches:

+                 if self.git_repo.does_branch_exist(branch):

+                     if self.git_repo.does_branch_contains_file(

+                         branch, deadpackage_file_path

+                     ):

+                         final_list_of_branches.append(branch)

+ 

+             _log.debug("Verifying if package is ready for unretirement.")

+             if not self._is_package_ready_for_unretirement(

+                 issue_id=issue["id"],

+                 branches=final_list_of_branches,

+                 review_bugzilla=issue_body["review_bugzilla"],

+             ):

+                 return

+ 

+             _log.debug("Reverting retire commit")

+             revert_commit_message = "Unretirement request: {0}".format(

+                 issue["full_url"]

+             )

+             for branch in final_list_of_branches:

+                 self.git_repo.revert_last_commit(

+                     message=revert_commit_message,

+                     user=self.pagure_user,

+                     token=self.oidc_distgit_token,

+                     branch=branch,

+                 )

+ 

+             _log.debug("Unblocking tags on Koji.")

+             if self._check_tags_to_unblock(final_list_of_branches, package_name):

+                 _log.debug("Unblocking tags in koji.")

+                 for tag in final_list_of_branches:

+                     try:

+                         self.koji_session.packageListUnblock(

+                             taginfo=tag, pkginfo=package_name

+                         )

+                     except koji.GenericError:

+                         msg = "Not able to unblock `{0}` tag on koji.".format(tag)

+                         self.pagure_io.close_issue(

+                             issue_id=issue["id"],

+                             namespace=PROJECT_NAMESPACE,

+                             message=msg,

+                             reason="Invalid",

+                         )

+                         return

+ 

+             _log.debug("Verifying package is not orphan.")

+             if self.pagure_io.is_project_orphaned(

+                 namespace=package_ns, repo=package_name

+             ):

+                 if maintainer_fas == "":

+                     msg = "Package is ophaned, but maintainer fas is not provided."

+                     self.pagure_io.close_issue(

+                         issue_id=issue["id"],

+                         namespace=PROJECT_NAMESPACE,

+                         message=msg,

+                         reason="Invalid",

+                     )

+                     return

+                 self.pagure_io.assign_maintainer_to_project(

+                     namespace=package_ns,

+                     repo=package_name,

+                     maintainer_fas=maintainer_fas,

+                 )

+ 

+         _log.info(

+             "Package {0} is assigned to {1}".format(

+                 f"{package_ns}/{package_name}", maintainer_fas

+             )

+         )

+         return

+ 

+     def _is_package_ready_for_unretirement(

+         self, issue_id: int, branches: list, review_bugzilla: str

+     ) -> bool:

+         """

+         Verify that package is ready for unretirement.

+ 

+         :arg issue_id: The ID of the issue being processed.

+         :arg branches: A list containing branches that need to be unretired.

+         :arg review_bugzilla: A str containing the URL of the Bugzilla review bug.

+ 

+         :returns: Bool value whether the package was verified.

+         """

+         try:

+             _log.debug("Verifying the reason of retirement.")

+             self._verify_package_not_retired_for_reason(branches=branches)

+             _log.debug("Verifying the date of retirement.")

+             self._verify_bugzilla_ticket(

+                 review_bugzilla=review_bugzilla, branches=branches

+             )

+         except ValidationError as error:

+             self.pagure_io.close_issue(

+                 issue_id=issue_id,

+                 namespace=PROJECT_NAMESPACE,

+                 message=str(error),

+                 reason="Invalid",

+             )

+             return False

+         return True

+ 

+     def _verify_package_not_retired_for_reason(self, branches: list):

+         """

+         Verify that commit message does not contain forbidden keywords.

+ 

+         Raises:

+             `toddler.exceptions.ValidationError`: When retirement reason wasn't verified

+         """

+         _log.debug("Verifying that issue message doesn't contain forbidden keywords")

+ 

+         for branch in branches:

+             last_commit_message = self.git_repo.get_last_commit_message(branch)

+             if any(

+                 re.search(forbidden_keyword, str(last_commit_message).lower())

+                 for forbidden_keyword in FORBIDDEN_KEYWORDS_FOR_COMMIT_MESSAGE

+             ):

+                 raise ValidationError(

+                     "Package was retired for a reason: legal or license issue."

+                 )

+ 

    def _verify_bugzilla_ticket(self, review_bugzilla, branches):
        """
        Verify if last commit was made more than 8 weeks ago, need to request a bugzilla ticket.

        If every branch's retire commit is recent enough (within
        TIME_DIFFERENCE_LIMIT days) the method returns without touching
        bugzilla at all; otherwise the review bug must exist, belong to the
        'Fedora' product and carry an approved fedora-review flag.

        :arg review_bugzilla: Bugzilla review reference; used directly as the
            bug id for the lookup. Empty string means no review was provided.
        :arg branches: Branch names whose last commit date is checked.

        Raises:
            `toddler.exceptions.ValidationError`: when a commit date cannot be
                determined, the bug is missing/invalid, or the fedora-review
                flag is absent or not '+'.
        """
        _log.debug("Verifying that retire commit was made less than 8 weeks ago.")

        is_need_to_verify_bz = False

        for branch in branches:
            last_commit_date = self.git_repo.get_last_commit_date(branch)
            if last_commit_date is None:
                raise ValidationError("Couldn't get a date of the retire commit.")
            else:
                last_commit_date = arrow.get(last_commit_date)

            current_time = arrow.utcnow()

            time_diff_in_days = (current_time - last_commit_date).days

            # One old branch is enough to require a bugzilla review;
            # the loop still finishes so every branch's date is validated.
            if time_diff_in_days > TIME_DIFFERENCE_LIMIT:
                is_need_to_verify_bz = True

        if not is_need_to_verify_bz:
            return

        if review_bugzilla == "":
            raise ValidationError(
                "Bugzilla url is missing, please add it and recreate the ticket."
            )

        # NOTE(review): the review reference is used verbatim as the bug id —
        # confirm callers pass an id rather than a full bugzilla URL.
        bug_id = review_bugzilla

        _log.debug("Getting the bug object from bugzilla.")
        try:
            bug = bugzilla_system.get_bug(bug_id)
        except Exception as error:
            # Any bugzilla-side failure (network, auth, bad id) is surfaced
            # to the reporter as a validation problem.
            raise ValidationError(
                "The Bugzilla bug could not be verified. The following "
                "error was encountered: {0}".format(str(error))
            )

        if bug is None:
            raise ValidationError(
                "Bugzilla can't get the bug by bug id, fix bugzilla url."
            )

        if bug.product != "Fedora":
            raise ValidationError(
                "The bugzilla bug is for '{0}', "
                "but request should be for 'Fedora'.".format(bug.product)
            )

        try:
            _log.info("Getting {0} flag from bug".format(FEDORA_REVIEW_FLAG_NAME))
            fedora_review_flag = bug.get_flags(FEDORA_REVIEW_FLAG_NAME)
            # When the flag is absent, get_flags yields a non-subscriptable
            # value and the subscript raises TypeError, handled below; the
            # ValidationError raised here is not a TypeError so it propagates.
            fedora_review_flag_status = fedora_review_flag[0]["status"]

            if fedora_review_flag_status != "+":
                raise ValidationError(
                    "Flag fedora-review has wrong status, need to be +"
                )
        except TypeError:
            raise ValidationError(
                "Tag fedora-review is missing on bugzilla, get it and recreate the ticket."
            )

+ 

+     def _check_tags_to_unblock(self, tags_to_unblock: list, repo: str) -> bool:

+         """

+         Check if at least one of the tags requested to be unblocked are really blocked.

+ 

+         :arg tags_to_unblock: List of branch names

+         :arg repo: Name of package

+ 

+         :returns: Bool value whether program need to unblock tags

+         """

+         _log.debug("Verifying that tags are blocked on koji.")

+         try:

+             package_tags = self.koji_session.listTags(package=repo)

+             if not package_tags:

+                 raise ValidationError("Package doesn't have tags on koji.")

+             tags_that_suppose_to_be_blocked = []

+ 

+             for tag in package_tags:

+                 prefix = "dist-"

+                 if tag["name"].startswith(prefix):

+                     tag_name = tag["name"][len(prefix) :]  # noqa: E203

+                     if tag_name in tags_to_unblock:

+                         tags_that_suppose_to_be_blocked.append(tag)

+ 

+             if len(tags_that_suppose_to_be_blocked) == 0:

+                 raise ValidationError(

+                     "Request to unblock tags that don't exist on koji."

+                 )

+             return any([tag["locked"] for tag in tags_that_suppose_to_be_blocked])

+         except koji.GenericError:

+             raise ValidationError("Package doesn't exist on koji.")

+ 

+     def _does_url_exist(self, url: str) -> bool:

+         """

+         Check whether url exist.

+ 

+         :arg url: Url that might exist

+ 

+         :returns: True if url exist, otherwise False

+         """

+         try:

+             response = self.requests_session.get(url)

+         except ConnectionError:

+             return False

+         return response.status_code == 200

+ 

+     @staticmethod

+     def _ns_convertor(namespace):

+         ns_mapping = {

+             "rpm": "rpms",

+             "test": "tests",

+             "flatpak": "flatpaks",

+             "module": "modules",

+         }

+         namespace = ns_mapping[namespace] if namespace in ns_mapping else namespace

+         return namespace

+ 

+ 

def _get_arguments(args):
    """
    Build the CLI parser and parse the given argument list.

    :arg args: Script arguments

    :returns: Parsed arguments
    """
    parser = argparse.ArgumentParser(
        description="Processor for Unretire packages, handling tickets from '{}'".format(
            PROJECT_NAMESPACE
        )
    )
    parser.add_argument("ticket", type=int, help="Number of ticket to process")
    parser.add_argument("--config", help="Configuration file")
    # --debug flips the log level; default stays at INFO.
    parser.add_argument(
        "--debug",
        action="store_const",
        dest="log_level",
        const=logging.DEBUG,
        default=logging.INFO,
        help="Enable debugging output",
    )
    return parser.parse_args(args)

+ 

+ 

def _setup_logging(log_level: int) -> None:
    """
    Set up the logging level and handlers for the script.

    :arg log_level: Log level to set
    """
    _log.setLevel(log_level)

    # We want all messages logged at level INFO or lower to be printed to stdout
    info_handler = logging.StreamHandler(stream=sys.stdout)
    handlers = [info_handler]

    if log_level == logging.INFO:
        # In normal operation, don't decorate messages
        plain_formatter = logging.Formatter("%(message)s")
        for handler in handlers:
            handler.setFormatter(plain_formatter)

    logging.basicConfig(level=log_level, handlers=handlers)

+ 

+ 

def main(args):
    """
    Main function: parse arguments, load config, fetch the ticket and
    hand it to the UnretirePackages processor.

    :arg args: Script arguments (without the program name)
    """
    args = _get_arguments(args)
    _setup_logging(log_level=args.log_level)
    _log.info("hello i'm starting work")

    # Bug fix: tomllib.load requires a binary file object, not a path
    # string — passing args.config directly raised at runtime.
    with open(args.config, "rb") as config_file:
        config = tomllib.load(config_file)

    ticket = args.ticket

    pagure_io = pagure.set_pagure(config)
    issue = pagure_io.get_issue(ticket, PROJECT_NAMESPACE)

    # Convert issue to message
    body = {"project": {"fullname": PROJECT_NAMESPACE}, "issue": issue}
    message = IssueNewV1(body=body)
    _log.debug("Message prepared: {}".format(message.body))

    UnretirePackages().process(
        config=config,
        message=message,
    )

+ 

+ 

+ if __name__ == "__main__":  # pragma: no cover

+     try:

+         main(sys.argv[1:])

+     except KeyboardInterrupt:

+         pass

@@ -0,0 +1,212 @@ 

+ """

+ This module is a wrapper for Anitya. It uses Anitya API to communicate and configure

+ release monitoring instance.

+ To work with it, you need to set it up by calling `set_anitya`.

+ 

+ Examples:

+     from utils import anitya

+ 

+     anitya_config = {

+         "anitya_endpoint": "https://release-monitoring.org/",

+         "anitya_access_token": "secret TOKEN",

+     }

+ 

+     anitya_obj = anitya.set_anitya(anitya_config)

+     anitya_obj.create_project_in_anitya("<name>", "<homepage>", "<backend>")

+ """

+ 

+ import logging

+ from typing import Optional

+ 

+ from toddlers.utils import requests

+ 

+ log = logging.getLogger(__name__)

+ 

+ 

def set_anitya(config):
    """
    Set the connection to the Anitya API.

    Params:
        config: Configuration dictionary with "anitya_endpoint" and
            "anitya_access_token" keys.

    Returns:
        Anitya: A configured Anitya wrapper instance.

    Raises:
        ValueError: If the endpoint or access token is missing from config.
    """
    return Anitya(config)

+ 

+ 

class Anitya(object):
    """
    Object that works with Anitya.
    """

    # URL to Anitya
    _anitya_endpoint: str = ""
    # API TOKEN to Anitya.
    # Fix: the declaration previously used the name "_anitya_access_token"
    # while __init__ and every method use "self._anitya_token".
    _anitya_token: str = ""
    # Request Session object used for communication
    _requests_session: requests.requests.Session

    def __init__(self, config):
        """
        Initialize the Anitya class.

        Params:
            config (dict): A configuration with anitya_endpoint and anitya_access_token keys.

        Raises:
            ValueError: If anitya_endpoint or anitya_access_token is missing.
        """
        # Strip a trailing slash so "/api/..." concatenations below don't
        # create a double slash.
        self._anitya_endpoint = config.get("anitya_endpoint", "").removesuffix("/")
        if not self._anitya_endpoint:
            raise ValueError("No anitya endpoint found in config file")

        self._anitya_token = config.get("anitya_access_token", "")
        if not self._anitya_token:
            raise ValueError("No anitya access token found in config file")

        self._requests_session = requests.make_session(timeout=300)

    def does_project_exists_in_anitya(self, project_name: str) -> Optional[str]:
        """
        Check if project exists in Anitya.

        Params:
            project_name (str): The name of the project.

        Returns:
            Optional[str]: project URL if it exists in Anitya, otherwise None.
        """
        projects_params = {
            "name": project_name,
        }
        endpoint = self._anitya_endpoint + "/api/v2/projects/"
        projects_response = self._requests_session.get(endpoint, params=projects_params)
        if projects_response.status_code != 200:
            log.debug("Project '{0}' not found in Anitya.".format(project_name))
            return None

        response_json = projects_response.json()
        if response_json["total_items"] == 0:
            log.debug("Project '{0}' not found in Anitya.".format(project_name))
            return None

        try:
            project_id = response_json["items"][0]["id"]
        except (KeyError, IndexError):
            # Defensive: total_items said there are results, but the
            # items payload is missing or malformed.
            return None
        return "{0}/project/{1}".format(self._anitya_endpoint, project_id)

    def does_package_exists_in_anitya(
        self, package_name: str, distribution: str, project_name: str
    ) -> bool:
        """
        Check if package exists in Anitya.

        Params:
            package_name (str): The name of the package.
            distribution (str): The name of the distribution.
            project_name (str): The name of the project.

        Returns:
            False if package doesn't exist.
            False if package exists but its project differs from the provided project name.
            True if package exists and its project is correct.
        """
        endpoint = self._anitya_endpoint + "/api/v2/packages/"
        packages_params = {
            "name": package_name,
            "distribution": distribution,
        }
        packages_response = self._requests_session.get(endpoint, params=packages_params)
        if packages_response.status_code != 200:
            log.info("Package '{0}' not found in Anitya.".format(package_name))
            return False  # Not able to find package
        response_json = packages_response.json()
        if response_json["total_items"] < 1:
            log.info("Package '{0}' not found in Anitya.".format(package_name))
            return False
        # True only when the existing package maps to the expected project.
        return response_json["items"][0]["project"] == project_name

    def create_project_in_anitya(
        self,
        name: str,
        homepage: str,
        backend: str,
    ) -> Optional[str]:
        """
        Create a new project in Anitya.

        Params:
            name (str): The name of the project.
            homepage (str): The homepage of the project.
            backend (str): The name of the backend.

        Returns:
            The project URL if successful, otherwise None.
        """
        headers = {"Authorization": "token " + self._anitya_token}
        endpoint = self._anitya_endpoint + "/api/v2/projects/"
        payload = {
            "name": name,
            "homepage": homepage,
            "backend": backend,
        }
        log.info("Creating project '{0}' in Anitya.".format(name))
        # NOTE(review): sends form-encoded data; confirm the Anitya v2 API
        # accepts form data rather than a JSON body.
        response = self._requests_session.post(
            url=endpoint, data=payload, headers=headers
        )
        if response.status_code != 201:
            return None
        project_id = response.json()["id"]
        return "{0}/project/{1}".format(self._anitya_endpoint, project_id)

    def create_package_in_anitya(
        self,
        package_name: str,
        project_name: str,
        distribution: str,
        project_ecosystem: str,
    ) -> Optional[str]:
        """
        Create a new package in Anitya.

        Params:
            package_name (str): The name of the package.
            project_name (str): The name of the project.
            distribution (str): The name of the distribution.
            project_ecosystem (str): The name of the ecosystem.

        Returns:
            Return message if status code is known, otherwise None.
        """
        headers = {"Authorization": "token " + self._anitya_token}
        endpoint = self._anitya_endpoint + "/api/v2/packages/"
        payload = {
            "package_name": package_name,
            "project_name": project_name,
            "distribution": distribution,
            "project_ecosystem": project_ecosystem,
        }
        log.info("Creating package '{0}' in Anitya.".format(package_name))
        response = self._requests_session.post(
            url=endpoint, data=payload, headers=headers
        )
        # Map the known status codes to their human-readable outcome;
        # anything else yields None, exactly as before.
        status_messages = {
            400: "Bad Request, some necessary arguments were not provided.",
            401: "Unauthorized, access token is incorrect.",
            409: "Conflict, package already exists.",
            201: "Success",
        }
        return status_messages.get(response.status_code)

file modified
+12 -4
@@ -136,7 +136,9 @@ 

          except Exception:  # Raised when branch name is not correct

              return None

  

-     def revert_last_commit(self, message: str, branch: str = "rawhide") -> None:

+     def revert_last_commit(

+         self, message: str, user: str, token: str, branch: str = "rawhide"

+     ) -> None:

          """

          Revert last commit with message on requested branch.

  
@@ -149,10 +151,16 @@ 

              self.repo.git.checkout(branch)

  

              # reverting last commit and changing the commit message

-             self.repo.git.execute(["git", "revert", "--no-edit", "HEAD"])

-             self.repo.git.execute(["git", "commit", "--amend", "-m", message])

+             self.repo.git.revert("HEAD", no_edit=True)

+             self.repo.git.commit("--amend", "-m", message)

+ 

              origin = self.repo.remote("origin")

-             origin.push()

+             git_cmd = self.repo.git

+ 

+             push_url = origin.url.replace(

+                 "https://", "https://{0}:{1}@".format(user, token)

+             )

+             git_cmd.push("-u", push_url, branch)

          except Exception as error:

              print(error, "\nSomething happened during reverting the last commit")