| |
@@ -0,0 +1,673 @@
|
| |
+ """
|
| |
+ This script takes as input the fedora messages published under the topic
|
| |
+ ``toddlers.trigger.distgit_bugzilla_sync`` and runs a sync of package ownership
|
| |
+ from distgit to bugzilla.
|
| |
+
|
| |
+ Authors: Michal Konecny <mkonecny@redhat.com>
|
| |
+
|
| |
+ """
|
| |
import argparse
import collections
import datetime
import json
import logging
import os
import re
import sys
import time
import xmlrpc.client
from operator import itemgetter
from typing import NoReturn

import toml

try:
    import tqdm
except ImportError:
    tqdm = None

from ..base import ToddlerBase
from ..utils import bugzilla_system, fedora_account, notify
from ..utils.package_summaries import PackageSummaries
from ..utils.requests import make_session
|
| |
+
|
| |
# Module-level logger for this toddler.
_log = logging.getLogger(__name__)


# Name of the user cache file (stored under the configured temp_folder);
# used by _notify_users to rate-limit per-user notifications.
USER_CACHE = "user_cache.json"
|
| |
+
|
| |
+
|
| |
class DistgitBugzillaSync(ToddlerBase):
    """Listens to messages sent by playtime (which lives in toddlers) to sync
    package ownership from distgit to bugzilla.
    """

    # Identifier under which this toddler is registered.
    name = "distgit_bugzilla_sync"

    # AMQP topics whose messages trigger a sync run.
    amqp_topics = ["org.fedoraproject.*.toddlers.trigger.distgit_bugzilla_sync"]

    def __init__(self):
        # Reusable HTTP session for all pagure/PDC requests.
        self.requests_session = make_session()
        # Errors collected during a run, grouped by category.
        self.errors = collections.defaultdict(list)
        # Projects fetched from dist-git (list of dicts).
        self.pagure_projects = []
        # The three mappings below are filled from configuration in process().
        self.product_to_branch_regex = {}
        self.branch_regex_to_product = {}
        self.namespace_to_product = {}
|
| |
+
|
| |
+ def accepts_topic(self, topic: str) -> bool:
|
| |
+ """Returns a boolean whether this toddler is interested in messages
|
| |
+ from this specific topic.
|
| |
+
|
| |
+ :arg topic: Topic to check.
|
| |
+
|
| |
+ :returns: True if topic is accepted, False otherwise.
|
| |
+ """
|
| |
+ return topic.startswith("org.fedoraproject.") and topic.endswith(
|
| |
+ "toddlers.trigger.distgit_bugzilla_sync"
|
| |
+ )
|
| |
+
|
| |
    def process(
        self,
        config: dict,
        message: dict,
        projects: list = None,
        print_fas_names: bool = False,
        print_no_change: bool = False,
        dry_run: bool = False,
    ) -> None:
        """Process a given message.

        Runs a full sync: loads the e-mail overrides, builds the project,
        FAS and bugzilla caches, then creates or updates a bugzilla component
        for every (product, project) pair, and finally reports errors.

        :arg config: Toddlers configuration
        :arg message: Message to process (not read here; the message only
            triggers the sync)
        :arg projects: Distgit projects to sync
        :arg print_fas_names: Print FAS names instead of
            bugzilla usernames (e-mails)
        :arg print_no_change: Log message even for projects
            where no change was done
        :arg dry_run: Don't do any change in bugzilla
        """
        try:
            email_overrides = toml.load(config["email_overrides_file"])
        except Exception:
            _log.error("Failed to load the file containing the email-overrides")
            raise

        # Fill the class attributes we will need later
        self.namespace_to_product = {
            p["namespace"]: n for n, p in config["products"].items() if "namespace" in p
        }
        self.product_to_branch_regex = {
            n: re.compile(p["branch_regex"])
            for n, p in config["products"].items()
            if "branch_regex" in p
        }
        # Inverse of the mapping above: compiled regex -> product name.
        self.branch_regex_to_product = {
            n: r for r, n in self.product_to_branch_regex.items()
        }
        # Wall-clock checkpoints used for the timing report at the end.
        times = {"start": time.time()}

        # Clean the errors
        self.errors.clear()

        _log.debug("Building a cache of the rpm package summaries")
        rpm_summaries = PackageSummaries().get_package_summaries(config)

        self._get_pagure_projects(config["dist_git_url"], project_list=projects)

        self._add_branches_products_and_summaries(
            # Base PDC URL: strip the "rest_api" suffix off the API endpoint.
            config["pdc_config"]["server"].split("rest_api")[0],
            config["pdc_types"],
            rpm_summaries,
            config["ignorable_accounts"],
        )

        times["data structure end"] = time.time()

        _log.info("Setting up connection to FAS")
        fedora_account.set_fas(config)

        _log.info("Build bugzilla mail cache for users")
        # Bidirectional FAS username <-> bugzilla e-mail caches so each
        # account is resolved at most once per run.
        bz_mail_to_fas = {}
        fas_to_bz_mail = {}

        for project in self.pagure_projects:
            if "products_poc" in project:
                for product, product_poc in project["products_poc"].items():
                    username = product_poc
                    if username and username not in fas_to_bz_mail:
                        bz_mail = self._get_bz_mail(username, email_overrides)
                        if bz_mail:
                            fas_to_bz_mail[username] = bz_mail
                            bz_mail_to_fas[bz_mail] = username
            for watcher in project["watchers"]:
                if watcher not in fas_to_bz_mail:
                    bz_mail = self._get_bz_mail(watcher, email_overrides)
                    if bz_mail:
                        fas_to_bz_mail[watcher] = bz_mail
                        bz_mail_to_fas[bz_mail] = watcher

        times["FAS cache building end"] = time.time()

        _log.info("Setting up connection to Bugzilla")
        bugzilla_system.set_bz(config)

        _log.debug("Building bugzilla's products in-memory cache")
        # product name -> packages/components known to bugzilla.
        product_cache = {}
        for collection in config["products"]:
            product_cache[collection] = bugzilla_system.get_product_info_packages(
                collection
            )
        times["BZ cache building end"] = time.time()

        if dry_run:
            _log.info("Querying bugzilla but not doing anything")
        else:
            _log.info("Updating bugzilla")

        # If the import fails, no progress bar
        # At DEBUG or below, we're showing things at each iteration so the progress
        # bar doesn't look good.
        # At WARNING or above, we do not want to show anything.
        if (
            tqdm is not None and _log.getEffectiveLevel() == logging.INFO
        ):  # pragma no cover
            self.pagure_projects = tqdm.tqdm(self.pagure_projects)

        for project in sorted(self.pagure_projects, key=itemgetter("name")):
            for product in project["products"]:

                # Turn the watchers list into something usable by bugzilla
                cc_list = []
                # Lower-cased shadow of cc_list for the case-insensitive
                # duplicate check when adding the owner below.
                cc_list_lower = []
                for watcher in project["watchers"]:
                    if watcher in fas_to_bz_mail:
                        cc_list.append(fas_to_bz_mail[watcher])
                        cc_list_lower.append(fas_to_bz_mail[watcher].lower())
                    else:
                        self.errors["mails"].append(
                            f"`{watcher}` has no bugzilla_email or mailing_list set on "
                            f"`{product}/{project['name']}`"
                        )

                # Add owner to the cclist so comaintainers taking over a bug don't
                # have to do this manually
                owner = project["products_poc"][product]
                if owner in fas_to_bz_mail:
                    if fas_to_bz_mail[owner].lower() not in cc_list_lower:
                        cc_list.append(fas_to_bz_mail[owner])
                else:
                    # The owner doesn't have a bugzilla e-mail on FAS account
                    # Not much we can do about it, log the error and continue
                    self.errors["mails"].append(
                        f"`{owner}` has no bugzilla_email or mailing_list set on "
                        f"`{product}/{project['name']}`"
                    )
                    continue

                qa_contact_email = config["default_qa_contact"]

                package = project["name"]
                try:
                    if package in product_cache.get(product, []):
                        # Component already known to bugzilla: update it.
                        bugzilla_system.edit_component(
                            owner=fas_to_bz_mail[owner],
                            product=product,
                            package=project["name"],
                            component=product_cache[product][package],
                            cc_list=cc_list,
                            versions=config["products"][product]["versions"],
                            description=project["summary"],
                            fas_users_info=bz_mail_to_fas,
                            retired=project["products_retired"][product],
                            print_fas_names=print_fas_names,
                            print_no_change=print_no_change,
                            dry_run=dry_run,
                        )
                    else:
                        # Component is new to bugzilla: create it.
                        bugzilla_system.add_component(
                            product=product,
                            owner=fas_to_bz_mail[owner],
                            package=project["name"],
                            qa_contact=qa_contact_email,
                            cc_list=cc_list,
                            fas_users_info=bz_mail_to_fas,
                            description=project["summary"],
                            retired=project["products_retired"][product],
                            print_fas_names=print_fas_names,
                            dry_run=dry_run,
                        )
                except xmlrpc.client.ProtocolError as e:
                    # Unrecoverable and likely means that nothing is going to
                    # succeed.
                    self.errors["bugzilla_raw"].append(str(e.args))
                    self.errors["bugzilla"].append(
                        f"Failed to update: `{product}/{project['name']}`:"
                        f"\n {e}"
                        f"\n {e.args}"
                    )
                    # NOTE(review): this break only leaves the inner product
                    # loop; the outer project loop keeps going -- confirm
                    # whether a full abort was intended.
                    break
                except xmlrpc.client.Error as e:
                    # An error occurred in the xmlrpc call. Shouldn't happen but
                    # we better see what it is
                    self.errors["bugzilla_raw"].append(
                        "%s -- %s" % (project["name"], e.args[-1])
                    )
                    self.errors["bugzilla"].append(
                        f"Failed to update: `{product}/{project['name']}`:"
                        f"\n {e}"
                        f"\n {e.args}"
                    )

        if self.errors:
            mail_server = config["mail_server"]
            admin_email = config["admin_email"]
            notify_admins = config["notify_admins"]
            if not dry_run and config["user_notifications"]:
                self._notify_users(
                    os.path.join(config["temp_folder"], USER_CACHE),
                    mail_server,
                    admin_email,
                    notify_admins,
                )

            # Build the report for the admins
            report = ["ERROR REPORT"]
            for key in ["configuration", "PDC", "mails", "bugzilla"]:
                if self.errors[key]:
                    report.append(key)
                    report.append(" - {}".format("\n - ".join(self.errors[key])))
                    report.append("")

            if dry_run:
                _log.debug("*" * 80)
                _log.debug("\n".join(report))

            # Do not send the email in dryrun or when the error only relates to
            # configuration (which will always happen for flatpaks and tests)
            if not dry_run and tuple(self.errors) != ("configuration",):
                notify.notify_admins_distgit_sync_error(
                    mail_server, admin_email, notify_admins, report
                )

        times["end"] = time.time()

        # NOTE(review): "%2f" below is a minimum field width of 2, not two
        # decimal places; "%.2f" was probably intended. Only log formatting
        # is affected -- confirm before changing.
        _log.info(" ----------")
        _log.info("Building the data structure")
        delta = times["data structure end"] - times["start"]
        _log.info(" Ran on %2f seconds -- ie: %2f minutes", delta, delta / 60)

        _log.info("Building the FAS cache")
        delta = times["FAS cache building end"] - times["data structure end"]
        _log.info(" Ran on %2f seconds -- ie: %2f minutes", delta, delta / 60)

        _log.info("Building the bugzilla cache")
        delta = times["BZ cache building end"] - times["FAS cache building end"]
        _log.info(" Ran on %2f seconds -- ie: %2f minutes", delta, delta / 60)

        _log.info("Interacting with bugzilla")
        delta = times["end"] - times["BZ cache building end"]
        _log.info(" Ran on %2f seconds -- ie: %2f minutes", delta, delta / 60)

        _log.info("Total")
        delta = times["end"] - times["start"]
        _log.info(" Ran on %2f seconds -- ie: %2f minutes", delta, delta / 60)
|
| |
+
|
| |
+ def _get_pagure_projects(
|
| |
+ self, dist_git_url: str, project_list: list = None
|
| |
+ ) -> NoReturn:
|
| |
+ """Builds a list of the projects on pagure in `self.pagure_projects`.
|
| |
+ Where every item is a dict containing:
|
| |
+ - the namespace of the project
|
| |
+ - the name of the project
|
| |
+ - the point of contact of this project (ie: the default assignee
|
| |
+ in bugzilla)
|
| |
+ - the watchers of this project (ie: the initial CC list in bugzilla)
|
| |
+
|
| |
+ :arg dist_git_url: Pagure distgit URL
|
| |
+ :arg project_list: List of the projects that will be in the returned list.
|
| |
+ If not given, returns every project in pagure. Default set to `None`.
|
| |
+ """
|
| |
+ # Get the initial ownership and CC data from pagure
|
| |
+ # This part is easy.
|
| |
+ poc_url = dist_git_url + "/extras/pagure_poc.json"
|
| |
+ _log.debug("Querying %r for points of contact.", poc_url)
|
| |
+ pagure_namespace_to_poc = self.requests_session.get(poc_url, timeout=120).json()
|
| |
+
|
| |
+ cc_url = dist_git_url + "/extras/pagure_bz.json"
|
| |
+ _log.debug("Querying %r for initial cc list.", cc_url)
|
| |
+ pagure_namespace_to_cc = self.requests_session.get(cc_url, timeout=120).json()
|
| |
+
|
| |
+ # Combine and collapse those two into a single list:
|
| |
+ self.pagure_projects = []
|
| |
+ if project_list:
|
| |
+ project_list = {tuple(p.split("/", 1)) for p in project_list}
|
| |
+ for namespace, entries in pagure_namespace_to_poc.items():
|
| |
+ for name, poc in entries.items():
|
| |
+ if not project_list or (namespace, name) in project_list:
|
| |
+ self.pagure_projects.append(
|
| |
+ {
|
| |
+ "namespace": namespace,
|
| |
+ "name": name,
|
| |
+ "poc": poc["fedora"],
|
| |
+ "epelpoc": poc["epel"],
|
| |
+ "watchers": pagure_namespace_to_cc[namespace][name],
|
| |
+ }
|
| |
+ )
|
| |
+
|
| |
+ def _add_branches_products_and_summaries(
|
| |
+ self,
|
| |
+ pdc_url: str,
|
| |
+ pdc_types: dict,
|
| |
+ rpm_summaries: dict,
|
| |
+ ignorable_accounts: list,
|
| |
+ ) -> NoReturn:
|
| |
+ """For each project retrieved, this method adds branches, products
|
| |
+ and summary information.
|
| |
+ The branches are retrieved from PDC.
|
| |
+ The products are determined based on the branches.
|
| |
+ The summaries are coming from the primary.xml file of the Rawhide repodata
|
| |
+ in Koji.
|
| |
+
|
| |
+ :arg pdc_url: PDC url to get data from
|
| |
+ :arg pdc_types: Mapping of types of PDC to distgit namespaces
|
| |
+ :arg rpm_summaries: Package summaries from koji
|
| |
+ :arg ignorable_accounts: Accounts to ignore when filling up watchers list.
|
| |
+ """
|
| |
+ _log.debug("Querying PDC for EOL information.")
|
| |
+
|
| |
+ pdc_branches = self.requests_session.get(
|
| |
+ pdc_url + "extras/active_branches.json"
|
| |
+ ).json()
|
| |
+ for idx, project in enumerate(self.pagure_projects):
|
| |
+ # Summary
|
| |
+ summary = None
|
| |
+ if project["namespace"] == "rpms":
|
| |
+ summary = rpm_summaries.get(project["name"])
|
| |
+ project["summary"] = summary
|
| |
+
|
| |
+ # Branches
|
| |
+ if project["namespace"] not in pdc_types:
|
| |
+ project["branches"] = []
|
| |
+ project["products"] = []
|
| |
+ error = (
|
| |
+ f'Namespace `{project["namespace"]}` not found in the pdc_type '
|
| |
+ f"configuration key -- ignoring it"
|
| |
+ )
|
| |
+ if error not in self.errors["configuration"]:
|
| |
+ self.errors["configuration"].append(error)
|
| |
+ _log.debug(
|
| |
+ "Namespace `%s` not found in the pdc_type "
|
| |
+ "configuration key, project %s/%s "
|
| |
+ "ignored",
|
| |
+ project["namespace"],
|
| |
+ project["namespace"],
|
| |
+ project["name"],
|
| |
+ )
|
| |
+ _log.debug(
|
| |
+ "Namespace `%s` not found in the pdc_type "
|
| |
+ "configuration key, project %s/%s "
|
| |
+ "ignored",
|
| |
+ project["namespace"],
|
| |
+ project["namespace"],
|
| |
+ project["name"],
|
| |
+ )
|
| |
+ continue
|
| |
+
|
| |
+ pdc_type = pdc_types[project["namespace"]]
|
| |
+ project["branches"] = pdc_branches.get(pdc_type, {}).get(
|
| |
+ project["name"], []
|
| |
+ )
|
| |
+ if not project["branches"]:
|
| |
+ self.errors["PDC"].append(
|
| |
+ f"No PDC branch found for {project['namespace']}/{project['name']}"
|
| |
+ )
|
| |
+
|
| |
+ # Products
|
| |
+ products = set()
|
| |
+ for branch, active in project.get("branches"):
|
| |
+ for regex, product in self.branch_regex_to_product.items():
|
| |
+ if regex.match(branch):
|
| |
+ products.add(product)
|
| |
+ break
|
| |
+ else:
|
| |
+ products.add(self.namespace_to_product[project["namespace"]])
|
| |
+ project["products"] = list(sorted(products))
|
| |
+
|
| |
+ products_poc = {}
|
| |
+ products_retired = {}
|
| |
+ for product in products:
|
| |
+ owner = project["poc"]
|
| |
+ # Check if the project is retired in PDC, and if so set assignee to orphan.
|
| |
+ if self._is_retired(product, project):
|
| |
+ owner = "orphan"
|
| |
+ products_retired[product] = True
|
| |
+ else:
|
| |
+ products_retired[product] = False
|
| |
+
|
| |
+ # Check if the Bugzilla ticket assignee has been manually overridden
|
| |
+ if product == "Fedora EPEL":
|
| |
+ products_poc[product] = project["epelpoc"]
|
| |
+ else:
|
| |
+ products_poc[product] = owner
|
| |
+
|
| |
+ project["products_poc"] = products_poc
|
| |
+ project["products_retired"] = products_retired
|
| |
+
|
| |
+ # Clean up the watchers we never want to sync to bugzilla
|
| |
+ # If these users are POC for a project, things will not work, which
|
| |
+ # is expected/desired.
|
| |
+ for user in ignorable_accounts or []:
|
| |
+ if user in (project.get("watchers") or []):
|
| |
+ project["watchers"].remove(user)
|
| |
+
|
| |
+ self.pagure_projects[idx] = project
|
| |
+
|
| |
+ def _get_bz_mail(self, username: str, email_overrides: list) -> str:
|
| |
+ """Get bugzilla e-mail for username.
|
| |
+
|
| |
+ :arg username: FAS username
|
| |
+ :arg email_overrides: E-mail overrides for bugzilla
|
| |
+
|
| |
+ :returns: Bugzilla e-mail
|
| |
+ """
|
| |
+ if username.startswith("@"):
|
| |
+ return fedora_account.get_bz_email_group(username[1:], email_overrides)
|
| |
+ return fedora_account.get_bz_email_user(username, email_overrides)
|
| |
+
|
| |
+ def _is_retired(self, product: str, project: str) -> bool:
|
| |
+ """Check if the project is retired.
|
| |
+ The project is considered retired if there is no active branch.
|
| |
+
|
| |
+ :arg product: Product to determine branches for which to look (example: "Fedora")
|
| |
+ :arg project: Project to check
|
| |
+
|
| |
+ :returns: Retired status.
|
| |
+ """
|
| |
+ branches = project["branches"]
|
| |
+ branch_regex = self.product_to_branch_regex.get(product)
|
| |
+ for branch, active in branches:
|
| |
+ if branch_regex:
|
| |
+ if branch_regex.match(branch) and active:
|
| |
+ return False
|
| |
+ else:
|
| |
+ if active:
|
| |
+ return False
|
| |
+ # No active branches means it is retired.
|
| |
+ return True
|
| |
+
|
| |
+ def _notify_users(
|
| |
+ self, user_cache: str, mail_server: str, admin_email: str, notify_admins: list
|
| |
+ ) -> NoReturn:
|
| |
+ """Sent notification to users if they had an invalid bugzilla mail.
|
| |
+
|
| |
+ :arg user_cache: Cache containing user notification data
|
| |
+ :arg mail_server: Mail server to use for sending
|
| |
+ :arg admin_email: E-mail address to send from
|
| |
+ :arg notify_admins: Admin e-mails to add to CC
|
| |
+ """
|
| |
+ data = {}
|
| |
+ if os.path.exists(user_cache):
|
| |
+ try:
|
| |
+ with open(user_cache) as stream:
|
| |
+ data = json.load(stream)
|
| |
+ except Exception as err:
|
| |
+ _log.error(
|
| |
+ "Could not read the json file at %s: \nError: %s", user_cache, err
|
| |
+ )
|
| |
+
|
| |
+ new_data = {}
|
| |
+ seen = []
|
| |
+ for error in self.errors["bugzilla"]:
|
| |
+ if "The name " in error and " is not a valid username" in error:
|
| |
+ user_email = (
|
| |
+ error.split(" is not a valid username")[0]
|
| |
+ .split("The name ")[1]
|
| |
+ .strip()
|
| |
+ )
|
| |
+ now = datetime.datetime.utcnow()
|
| |
+ notify_user = False
|
| |
+
|
| |
+ # See if we already know about this user
|
| |
+ if user_email in data and data[user_email]["last_update"]:
|
| |
+ last_update = datetime.datetime.fromtimestamp(
|
| |
+ int(data[user_email]["last_update"])
|
| |
+ )
|
| |
+ # Only notify users once per hour
|
| |
+ if (now - last_update).seconds >= 3600:
|
| |
+ notify_user = True
|
| |
+ else:
|
| |
+ new_data[user_email] = data[user_email]
|
| |
+ elif not data or user_email not in data:
|
| |
+ notify_user = True
|
| |
+
|
| |
+ # Ensure we notify the user only once, no matter how many errors we
|
| |
+ # got concerning them.
|
| |
+ if user_email not in seen:
|
| |
+ seen.append(user_email)
|
| |
+
|
| |
+ if notify_user:
|
| |
+ notify.notify_packager_distgit_sync_error(
|
| |
+ mail_server, admin_email, user_email, notify_admins
|
| |
+ )
|
| |
+
|
| |
+ new_data[user_email] = {"last_update": time.mktime(now.timetuple())}
|
| |
+
|
| |
+ with open(user_cache, "w") as stream:
|
| |
+ json.dump(new_data, stream)
|
| |
+
|
| |
+
|
| |
# In case this code needs to be run manually for some projects,
# the following code allows running this script stand-alone if
# needed.
|
| |
+
|
| |
+
|
| |
+ def _get_arguments(args):
|
| |
+ """Load and parse the CLI arguments.
|
| |
+
|
| |
+ :arg args: Script arguments
|
| |
+
|
| |
+ :returns: Parsed arguments
|
| |
+ """
|
| |
+ parser = argparse.ArgumentParser(
|
| |
+ description="Sync distgit package info with bugzilla"
|
| |
+ )
|
| |
+
|
| |
+ parser.add_argument(
|
| |
+ "conf",
|
| |
+ help="Configuration file",
|
| |
+ )
|
| |
+
|
| |
+ parser.add_argument(
|
| |
+ "-p",
|
| |
+ "--project",
|
| |
+ dest="projects",
|
| |
+ nargs="+",
|
| |
+ help="Update one or more projects (provided as namespace/name), "
|
| |
+ "in all of its products",
|
| |
+ )
|
| |
+
|
| |
+ parser.add_argument(
|
| |
+ "--dry-run",
|
| |
+ action="store_true",
|
| |
+ dest="dry_run",
|
| |
+ default=False,
|
| |
+ help="Do not change anything on bugzilla",
|
| |
+ )
|
| |
+
|
| |
+ parser.add_argument(
|
| |
+ "--debug",
|
| |
+ action="store_const",
|
| |
+ dest="log_level",
|
| |
+ const=logging.DEBUG,
|
| |
+ default=logging.INFO,
|
| |
+ help="Enable debugging output",
|
| |
+ )
|
| |
+
|
| |
+ parser.add_argument(
|
| |
+ "--print-fas-names",
|
| |
+ action="store_true",
|
| |
+ default=False,
|
| |
+ help="Print FAS names rather than email addresses in output, useful when pasting into "
|
| |
+ "public fora",
|
| |
+ )
|
| |
+
|
| |
+ parser.add_argument(
|
| |
+ "--print-no-change",
|
| |
+ action="store_true",
|
| |
+ default=False,
|
| |
+ help="Print elements that are not being changed as they are checked",
|
| |
+ )
|
| |
+
|
| |
+ return parser.parse_args(args)
|
| |
+
|
| |
+
|
| |
def _setup_logging(log_level: int) -> None:
    """Setup the logging level.

    :arg log_level: Log level to set
    """
    handlers = []

    _log.setLevel(log_level)
    # All records go to stdout; note there is no level filter on this
    # handler, so records above INFO end up on stdout as well.
    info_handler = logging.StreamHandler(stream=sys.stdout)
    handlers.append(info_handler)

    if log_level == logging.INFO:
        # In normal operation, don't decorate messages
        for handler in handlers:
            handler.setFormatter(logging.Formatter("%(message)s"))

    logging.basicConfig(level=log_level, handlers=handlers)
|
| |
+
|
| |
+
|
| |
def main(args):
    """Entry point when the sync is run manually from the command line."""
    parsed_args = _get_arguments(args)
    _setup_logging(log_level=parsed_args.log_level)

    raw_config = toml.load(parsed_args.conf)
    consumer_config = raw_config.get("consumer_config", {})
    parsed_config = consumer_config.get("default", {})
    parsed_config.update(consumer_config.get("distgit_bugzilla_sync", ()))
    DistgitBugzillaSync().process(
        config=parsed_config,
        message={},
        projects=parsed_args.projects,
        print_fas_names=parsed_args.print_fas_names,
        print_no_change=parsed_args.print_no_change,
        dry_run=parsed_args.dry_run,
    )
|
| |
+
|
| |
+
|
| |
if __name__ == "__main__":  # pragma: no cover
    try:
        main(sys.argv[1:])
    except KeyboardInterrupt:
        # Allow a clean Ctrl+C exit without a traceback.
        pass
|
| |
This PR is a migration of the
https://pagure.io/fedora-infra/distgit-bugzilla-sync script to toddlers.
The test coverage is 100% and a manual test was done with the dry-run
parameter.
The toddler is executed by the message emitted by playtime. It uses
the same email_overrides file as the
packagers_without_bugzilla
toddler and introduces a few other configuration options of its own.
The toddler can also be run manually by calling
python3 -m toddlers.plugins.distgit_bugzilla_sync <conf>
where <conf>
is the configuration file that needs to have the
default
and distgit_bugzilla_sync
sections from the
consumer_config
toml configuration.

Signed-off-by: Michal Konečný <mkonecny@redhat.com>