#79 Packager Dashboard refactoring and cleanup
Merged 3 years ago by frantisekz. Opened 3 years ago by frantisekz.

file modified
-1
@@ -24,7 +24,6 @@ 

  

  from flask import Flask

  from flask_sqlalchemy import SQLAlchemy

- from flask_caching import Cache

  from flask_cors import CORS

  from flask_oidc import OpenIDConnect

  from flask_login import LoginManager

file modified
+2 -2
@@ -23,7 +23,7 @@ 

  from flask import jsonify

  

  from oraculum import app, CACHE

- from oraculum.utils import libkarma

+ from oraculum.utils import bodhi

  

  def get_current_releases():

      releases = CACHE.get('fedora_releases')["values"][:-1] # Cut the last element because we don't want to query rawhide
@@ -34,7 +34,7 @@ 

      updates = {}

      for single_release in current_releases:

          app.logger.debug("Appending updates for release: %s" % single_release)

-         updates[single_release] = libkarma.get_updates(single_release, app.logger)

+         updates[single_release] = bodhi.get_updates_from_bodhi(single_release)

      return updates

  

  

file modified
+14 -14
@@ -22,7 +22,7 @@ 

  from flask import url_for, jsonify

  

  from oraculum import app, CACHE

- from oraculum.utils import dashboard

+ from oraculum.utils import dashboard_helpers, pagure, koschei, health_check, bodhi, bugzilla

  

  from oraculum.action_providers import ACTION_PROVIDERS

  
@@ -41,27 +41,27 @@ 

      """

      Add providers to be cached here

      """

-     CACHE.register('fedora_releases', dashboard.get_fedora_releases)

+     CACHE.register('fedora_releases', dashboard_helpers.get_fedora_releases)

      CACHE.register('landing_page', api_v1_landing_page)

      for p_name, p_module in ACTION_PROVIDERS.items():

          CACHE.register(p_name, p_module.get_actions)

  

-     CACHE.register('pagure_groups', dashboard.get_pagure_groups)

+     CACHE.register('pagure_groups', pagure.get_pagure_groups)

      CACHE.register('packages_owners_json',

-                    lambda url=app.config['PACKAGE_MAINTAINERS_JSON_URL']: dashboard.get_json(url))

-     CACHE.register('orphans_json', lambda url=app.config['ORPHANS_JSON_URL']: dashboard.get_json(url))

-     CACHE.register('koschei_data', dashboard.parse_koschei_data)

-     CACHE.register('health_check_data', dashboard.get_health_check_data)

-     CACHE.register('bodhi_overrides', dashboard.get_overrides)

+                    lambda url=app.config['PACKAGE_MAINTAINERS_JSON_URL']: dashboard_helpers.get_json(url))

+     CACHE.register('orphans_json', lambda url=app.config['ORPHANS_JSON_URL']: dashboard_helpers.get_json(url))

+     CACHE.register('koschei_data', koschei.parse_koschei_data)

+     CACHE.register('health_check_data', health_check.get_health_check_data)

+     CACHE.register('bodhi_overrides', bodhi.get_overrides)

      CACHE.register('bodhi_updates', all_bodhi_updates)

-     CACHE.register('ftbfs_trackers', dashboard.get_ftbfs_trackers)

-     CACHE.register('fti_trackers', dashboard.get_fti_trackers)

-     CACHE.register('blocker_trackers', dashboard.get_blocker_trackers)

+     CACHE.register('ftbfs_trackers', bugzilla.get_ftbfs_trackers)

+     CACHE.register('fti_trackers', bugzilla.get_fti_trackers)

+     CACHE.register('blocker_trackers', bugzilla.get_blocker_trackers)

  

      CACHE.register('packager-dashboard_user_data_static', dashboard_user_data_static)

-     CACHE.register('packager-dashboard_bugs', dashboard.get_package_bugs)

-     CACHE.register('packager-dashboard_pull_requests', dashboard.get_package_prs)

-     CACHE.register('packager-dashboard_package_versions', dashboard.get_package_versions)

+     CACHE.register('packager-dashboard_bugs', bugzilla.get_package_bugs)

+     CACHE.register('packager-dashboard_pull_requests', pagure.get_package_prs)

+     CACHE.register('packager-dashboard_package_versions', pagure.get_package_versions)

      CACHE.register('packager-dashboard_orphan_page', handle_orphan_user)

  

  

file modified
+10 -9
@@ -21,7 +21,7 @@ 

  from flask import jsonify

  

  from oraculum import app, CACHE

- from oraculum.utils import cache_utils, dashboard

+ from oraculum.utils import cache_utils, orphans, health_check, bodhi, koschei, pagure, dashboard_helpers

  

  def handle_orphan_user():

      orphans_data = list(CACHE.get('orphans_json')['affected_packages'].keys())
@@ -38,7 +38,7 @@ 

          'koschei': pkg_dict

          }

  

-     static_info['orphans'] = dashboard.get_orphans(static_info['packages'])

+     static_info['orphans'] = orphans.get_orphans(static_info['packages'])

  

      return {

          'static_info': {"data": static_info, 'status': 200, 'last_synced': CACHE.get_refreshed_time('orphans_json').isoformat()},
@@ -61,7 +61,7 @@ 

      else:

          static_info = {'status': 200, 'data': packages_promise, 'last_synced': last_synced.isoformat()}

          if len(static_info["data"]["packages"]) > 0:

-             dashboard.update_user_access_time(user)

+             dashboard_helpers.update_user_access_time(user)

  

      prs = dashboard_user_data_prs(user)

      bzs = dashboard_user_data_bzs(user)
@@ -71,7 +71,7 @@ 

  

  

  def dashboard_user_data_static(user):

-     packages = dashboard.get_packages(user, CACHE.get('packages_owners_json'), CACHE.get('pagure_groups'))

+     packages = pagure.get_packages(user, CACHE.get('packages_owners_json'), CACHE.get('pagure_groups'))

      # Just throw out empty lists and dicts for users without any packages

      if len(packages["combined"]) == 0:

          return {
@@ -88,11 +88,11 @@ 

          'packages': packages["combined"],

          'group_packages': packages["group"],

          'primary_packages': packages["primary"],

-         'orphans': dashboard.get_orphans(packages["combined"]),

-         'fails_to_install': dashboard.get_health_check_user_data(packages["combined"]),

-         'updates': dashboard.get_updates(packages["combined"], CACHE.get('bodhi_updates')),

-         'overrides': dashboard.get_user_overrides(packages["combined"], CACHE.get('bodhi_overrides')),

-         'koschei': dashboard.get_user_koschei_data(packages["combined"]),

+         'orphans': orphans.get_orphans(packages["combined"]),

+         'fails_to_install': health_check.get_health_check_user_data(packages["combined"]),

+         'updates': bodhi.get_updates(packages["combined"], CACHE.get('bodhi_updates')),

+         'overrides': bodhi.get_user_overrides(packages["combined"], CACHE.get('bodhi_overrides')),

+         'koschei': koschei.get_user_koschei_data(packages["combined"]),

      }

  

  
@@ -107,6 +107,7 @@ 

          status = 202

      return {"status": status, "data": data}

  

+ 

  def dashboard_user_data_prs(user):

      data = CACHE.async_get('packager-dashboard_user_data_static', 'low', user)

      if data in [cache_utils.RefresherNotRegistered, cache_utils.AsyncRefreshInProgress]:

file added
+197
@@ -0,0 +1,197 @@ 

+ #

+ # bodhi.py - Bodhi parsers and utilities for packager dashboard

+ #

+ # Copyright 2020, Red Hat, Inc

+ #

+ # This program is free software; you can redistribute it and/or modify

+ # it under the terms of the GNU General Public License as published by

+ # the Free Software Foundation; either version 2 of the License, or

+ # (at your option) any later version.

+ #

+ # This program is distributed in the hope that it will be useful,

+ # but WITHOUT ANY WARRANTY; without even the implied warranty of

+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

+ # GNU General Public License for more details.

+ #

+ # You should have received a copy of the GNU General Public License along

+ # with this program; if not, write to the Free Software Foundation, Inc.,

+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

+ #

+ # Authors:

+ #   Frantisek Zatloukal <fzatlouk@redhat.com>

+ #   Josef Skladanka <jskladan@redhat.com>

+ 

+ from bodhi.client.bindings import BodhiClient

+ 

+ import datetime

+ 

+ from oraculum import app

+ from oraculum.utils.dashboard_helpers import release_from_nevra, name_in_nevra

+ 

+ 

+ def query_bodhi(bodhi_client, release, pending=False):

+     """Deal with querying bodhi and combining all relevant pages into a

+     single list of updates."""
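+     # pending=True asks for updates that are queued for testing (request "testing",

+     # status "pending"); otherwise only updates already in testing are returned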

+     query_args = {"release": str(release),

+                   "rows_per_page": 50,

+                   }

+     if pending:

+         query_args["request"] = "testing"

+         query_args["status"] = "pending"

+     else:

+         query_args["status"] = "testing"

+ 

+     updates = []

+     try:

+         # since bodhi has a query limit but multiple pages, get ALL of the

+         # updates before starting to process

+         result = bodhi_client.query(**query_args)

+         updates.extend(result['updates'])

+         while result.page < result.pages:

+             next_page = result['page'] + 1

+             app.logger.debug("Fetching updates page {} of {}".format(

+                 next_page, result['pages']))

+             result = bodhi_client.query(page=next_page, **query_args)

+             updates.extend(result['updates'])

+     # There is no clear indication which Exceptions bc.query() might

+     # throw, therefore catch all (python-fedora-0.3.32.3-1.fc19)

+     except Exception as e:

+         app.logger.debug("Error while querying Bodhi: {0}".format(e))

+ 

+     return updates

+ 

+ 

+ def process_update(update):

+     """

+     Cleans up single update dictionary to contain only data frontend needs

+     """

+     return {

+         "pretty_name": update["title"],

+         "updateid": update["alias"],

+         "submission_date": update["date_submitted"],

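+         # For autotime updates, estimate when the update reaches stable:

+         # stable_days counted from the last modification (or submission) date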
+         "stable_by_time": str(

+                 datetime.datetime.fromisoformat(update["date_modified"] or update["date_submitted"]) +

+                 datetime.timedelta(days=update["stable_days"])

+             ) if update["autotime"] else None,

+         "release": release_from_nevra(update["title"]),

+         "url": update["url"],

+         "status": update["status"],

+         "karma": update["karma"],

+         "comments": len(update["comments"])

+     }

+ 

+ 

+ def process_override(override):

+     """

+     Cleans up single override dictionary to contain only data frontend needs

+     """

+     return {

+         "pretty_name": override["nvr"],

+         "url": "https://bodhi.fedoraproject.org/overrides/" + override["nvr"],

+         "submission_date": override["submission_date"],

+         "expiration_date": override["expiration_date"],

+         "release": release_from_nevra(override["nvr"])

+     }

+ 

+ 

+ def get_updates(packages, raw_updates):

+     """

+     Gets list of user owned packages and the Bodhi dump of all updates

+     Returns dict of user owned packages and updates for them

+     package_name: {

+         "pretty_name": update["title"],

+         "updateid": update["alias"],

+         "submission_date": update["date_submitted"],

+         "stable_by_time": (update["date_submitted"] + update["stable_days"]) if update["autotime"] else None,

+         "release": release_from_nevra(update["title"]),

+         "url": update["url"],

+         "status": update["status"],

+         "karma": update["karma"],

+         "comments": len(update["comments"])

+     }

+     """

+     if len(packages) == 0:

+         return {}

+     data = {}

+     # Prepare dict items for each package

+     for package in packages:

+         data[package] = []

+ 

+     # raw_updates["Fxx"][0...n]["builds"][0...n]["nvr"] = 'package_name-version-release'

+     # raw_updates["Fxx"][0...n]["alias"] = FEDORA-YYYY-UPDATE_ID

+     for release in raw_updates:

+         for package in packages:

+             present_updates = set()  # helper set to deduplicate updates containing more than one package

+             for update in raw_updates[release]:

+                 for build in update["builds"]:

+                     if name_in_nevra(package, build["nvr"]) and update["alias"] not in present_updates:

+                         data[package].append(process_update(update))

+                         present_updates.add(update["alias"])

+ 

+     return data

+ 

+ 

+ def get_overrides():

+     """

+     Returns list of all active buildroot overrides

+     """

+     bc = BodhiClient(username="oraculum",

+                      useragent="Fedora Easy Karma/GIT",

+                      retries=3)

+ 

+     query_args = {"expired": False,

+                   "rows_per_page": 50,

+                   }

+ 

+     overrides = []

+     try:

+         # since bodhi has a query limit but multiple pages, get ALL of the

+         # overrides before starting to process

+         result = bc.list_overrides(**query_args)

+         overrides.extend(result['overrides'])

+         while result.page < result.pages:

+             next_page = result['page'] + 1

+             app.logger.debug("Fetching overrides page {} of {}".format(

+                 next_page, result['pages']))

+             result = bc.list_overrides(page=next_page, **query_args)

+             overrides.extend(result['overrides'])

+     # There is no clear indication which Exceptions bc.list_overrides() might

+     # throw, therefore catch all (python-fedora-0.3.32.3-1.fc19)

+     except Exception as e:

+         app.logger.error("Error while querying Bodhi: {0}".format(e))

+ 

+     return overrides

+ 

+ 

+ def get_user_overrides(packages, raw_overrides):

+     """

+     Returns all active buildroot overrides for single user

+     Formatted as dict of dicts:

+     package_name: {

+         "pretty_name": override["nvr"],

+         "url": "https://bodhi.fedoraproject.org/overrides/" + override["nvr"],

+         "submission_date": override["submission_date"],

+         "expiration_date": override["expiration_date"],

+         "release": release_from_nevra(override["nvr"])

+     }

+     """

+     data = {}

+     for package in packages:

+         data[package] = []

+     for override in raw_overrides:

+         for package in packages:

+             if name_in_nevra(package, override["nvr"]):

+                 data[package].append(process_override(override))

+     return data

+ 

+ 

+ def get_updates_from_bodhi(release):
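+     """

+     Returns all updates in testing, or pending a push to testing, for a single release

+     """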

+     bc = BodhiClient(username="oraculum",

+                      useragent="Fedora Easy Karma/GIT",

+                      retries=3)

+ 

+     updates = query_bodhi(bc, release, False)

+     updates_pending = query_bodhi(bc, release, True)

+     updates.extend(updates_pending)

+ 

+     return updates

file added
+117
@@ -0,0 +1,117 @@ 

+ #

+ # bugzilla.py - Bugzilla parsers and utilities for packager dashboard

+ #

+ # Copyright 2020, Red Hat, Inc

+ #

+ # This program is free software; you can redistribute it and/or modify

+ # it under the terms of the GNU General Public License as published by

+ # the Free Software Foundation; either version 2 of the License, or

+ # (at your option) any later version.

+ #

+ # This program is distributed in the hope that it will be useful,

+ # but WITHOUT ANY WARRANTY; without even the implied warranty of

+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

+ # GNU General Public License for more details.

+ #

+ # You should have received a copy of the GNU General Public License along

+ # with this program; if not, write to the Free Software Foundation, Inc.,

+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

+ #

+ # Authors:

+ #   Frantisek Zatloukal <fzatlouk@redhat.com>

+ #   Josef Skladanka <jskladan@redhat.com>

+ 

+ import urllib.parse

+ 

+ from oraculum import CACHE, BUGZILLA

+ from oraculum.utils.dashboard_helpers import release_from_number

+ 

+ 

+ def get_blocker_trackers():
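+     """

+     Returns Bugzilla ids of the BetaBlocker and FinalBlocker tracker bugs

+     for the branched (if any) and Rawhide releases

+     """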

+     tracker_ids = []

+     releases = CACHE.get('fedora_releases')

+     if releases["branched"]:

+         try:

+             tracker_ids.append(BUGZILLA.getbug("F%sBetaBlocker" % releases["branched"]).id)

+             tracker_ids.append(BUGZILLA.getbug("F%sFinalBlocker" % releases["branched"]).id)

+         except IndexError:

+             pass

+     try:

+         tracker_ids.append(BUGZILLA.getbug("F%sBetaBlocker" % releases["rawhide"]).id)

+         tracker_ids.append(BUGZILLA.getbug("F%sFinalBlocker" % releases["rawhide"]).id)

+     except IndexError:

+         pass

+     return tracker_ids

+ 

+ 

+ def get_fti_trackers():
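+     """

+     Returns Bugzilla ids of the FailsToInstall tracker bugs for all active releases

+     """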

+     tracker_ids = []

+     releases = CACHE.get('fedora_releases')["values"]

+     for release in releases:

+         try:

+             tracker_ids.append(BUGZILLA.getbug("F%sFailsToInstall" % release).id)

+         except IndexError:

+             continue

+     return tracker_ids

+ 

+ 

+ def get_ftbfs_trackers():
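+     """

+     Returns Bugzilla ids of the FTBFS (Fails To Build From Source) tracker bugs for all active releases

+     """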

+     tracker_ids = []

+     releases = CACHE.get('fedora_releases')["values"]

+     for release in releases:

+         try:

+             tracker_ids.append(BUGZILLA.getbug("F%sFTBFS" % release).id)

+         except IndexError:

+             continue

+     return tracker_ids

+ 

+ 

+ def get_package_bugs(package):

+     """

+     Returns all open Bugs for a single package

+     """

+     data = []

+     query = BUGZILLA.url_to_query("https://bugzilla.redhat.com/buglist.cgi?bug_status=NEW&bug_status=__open__&"

+                                   "classification=Fedora&product=Fedora&product=Fedora EPEL&component=%s" % urllib.parse.quote(package))

+     query["include_fields"] = ['blocks', 'comments', 'creation_time', 'creator', 'id', 'keywords', 'last_change_time',

+                                'severity', 'priority', 'status', 'summary', 'version', 'whiteboard']

+     bugs = BUGZILLA.query(query)

+     if len(bugs) == 0:

+         return []

+     ftbfs_trackers = set(CACHE.get('ftbfs_trackers'))

+     fti_trackers = set(CACHE.get('fti_trackers'))

+     blocker_trackers = set(CACHE.get('blocker_trackers'))
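+     # Tag each bug with synthetic keywords (ReleaseMonitoring, FTBFS, FTI,

+     # Accepted/ProposedBlocker) based on its reporter and which trackers it blocks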

+     for bug in bugs:

+         if bug.creator == 'Upstream Release Monitoring':

+             bug.keywords.append("ReleaseMonitoring")

+         for blocks in bug.blocks:

+             if blocks in ftbfs_trackers and "FTBFS" not in bug.keywords:

+                 bug.keywords.append("FTBFS")

+             if blocks in fti_trackers and "FTI" not in bug.keywords:

+                 bug.keywords.append("FTI")

+             if blocks in blocker_trackers and "AcceptedBlocker" in bug.whiteboard and "AcceptedBlocker" not in bug.keywords:

+                 bug.keywords.append("AcceptedBlocker")

+             if blocks in blocker_trackers and "AcceptedBlocker" not in bug.whiteboard and "ProposedBlocker" not in bug.keywords:

+                 bug.keywords.append("ProposedBlocker")

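+         # Combined priority/severity scale, lowest to highest; priority_severity

+         # below reports the higher of the two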
+         strs = ['unspecified', 'low', 'medium', 'high', 'urgent']
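+         # Strip a possible epel/el prefix from bug.version so release_from_number

+         # gets a plain release number (or "rawhide")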

+         release = release_from_number(bug.version.replace("epel", "").replace("el", ""))

+         if "[fedora-all]" in bug.summary:

+             release = "Fedora"

+         if "[epel-all]" in bug.summary:

+             release = "EPEL"

+         data.append({

+             "title": bug.summary,

+             "bug_id": bug.id,

+             "severity": bug.severity,

+             "priority": bug.priority,

+             "priority_severity": strs[max(strs.index(bug.priority), strs.index(bug.severity))],

+             "status": bug.status,

+             # moment.js on frontend requires RFC2822 datetime format, bugzilla returns RFC3339

+             "modified": str(bug.last_change_time).replace("T", " ").replace("Z", ""),

+             "reported": str(bug.creation_time).replace("T", " ").replace("Z", ""),

+             "release": release,

+             "keywords": bug.keywords,

+             "comments": (len(bug.comments) - 1),  # Let's ignore the first comment that every bug has

+             "url": "https://bugzilla.redhat.com/%s" % bug.id

+         })

+     return data

file removed
-16
@@ -1,16 +0,0 @@ 

- import bugzilla

- 

- URL = "bugzilla.redhat.com"

- API_KEY = "ZBfkPOQdiiQezGUP1ZTIt8DpZiotdZAmfj4eu1PP"

- 

- 

- # FIXME bz user is email, not FAS :-(

- USER_BUGS_URL = "https://bugzilla.redhat.com/buglist.cgi?bug_status=NEW&bug_status=ASSIGNED&bug_status=POST&bug_status=MODIFIED&bug_status=ON_DEV&bug_status=ON_QA&bug_status=VERIFIED&bug_status=RELEASE_PENDING&email1=%s&emailassigned_to1=1&emailcc1=1&emaildocs_contact1=1&emaillongdesc1=1&emailqa_contact1=1&emailreporter1=1&emailtype1=substring&list_id=9012930&query_format=advanced"

- 

- bzapi = bugzilla.Bugzilla(URL)

- 

- 

- def get_user_bugs(user):

-     query = bzapi.url_to_query(USER_BUGS_URL % user)

-     bugs = bzapi.query(query)

-     return bugs

file removed
-855
@@ -1,855 +0,0 @@ 

- #

- # dashboard.py - Utilities to parse data sources for packager dashboard

- #

- # Copyright 2020, Red Hat, Inc

- #

- # This program is free software; you can redistribute it and/or modify

- # it under the terms of the GNU General Public License as published by

- # the Free Software Foundation; either version 2 of the License, or

- # (at your option) any later version.

- #

- # This program is distributed in the hope that it will be useful,

- # but WITHOUT ANY WARRANTY; without even the implied warranty of

- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

- # GNU General Public License for more details.

- #

- # You should have received a copy of the GNU General Public License along

- # with this program; if not, write to the Free Software Foundation, Inc.,

- # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

- #

- # Authors:

- #   Frantisek Zatloukal <fzatlouk@redhat.com>

- #   Josef Skladanka <jskladan@redhat.com>

- 

- from collections import defaultdict

- from json import JSONDecodeError

- 

- import fedfind.helpers

- import json

- import datetime

- import requests

- import bugzilla

- import koji

- import urllib.parse

- import re

- import igraph

- 

- from bodhi.client.bindings import BodhiClient

- from urllib3.util.retry import Retry

- from requests.adapters import HTTPAdapter

- from requests.exceptions import ConnectionError

- 

- from oraculum import app, db, CACHE, BUGZILLA

- from oraculum.models.dashboard_users import DashboardUserData

- 

- def get_fedora_releases():

-     """

-     Returns dict with keys containing active Fedora releases as described by keys.

-     "values" key contains list of active Fedora release numbers

-     """

-     releases = {"oldstable": (max(fedfind.helpers.get_current_stables()) - 1),

-                 "stable": max(fedfind.helpers.get_current_stables()),

-                 "branched": fedfind.helpers.get_current_release(branched=True),

-                 "rawhide": (fedfind.helpers.get_current_release(branched=True) + 1)}

- 

-     releases["values"] = sorted(set(releases.values()))

-     if releases["stable"] == releases["branched"]:

-         releases["branched"] = None

-     return releases

- 

- 

- def update_user_access_time(user):

-     """

-     Updates user last_accessed with current timestamp

-     """

-     row = DashboardUserData.query.filter_by(username=user).first()

-     if not row:

-         row = DashboardUserData(user, datetime.datetime.utcnow())

-         db.session.add(row)

-     else:

-         row.last_accessed = datetime.datetime.utcnow()

-     db.session.commit()

- 

- 

- def name_in_nevra(name, nevra):

-     """

-     Checks if name/nevra matches

-     """

-     #!!! - hawkey.split_nevra leaks memory, so we have to do this shit instead... FUN!

-     #if name == hawkey.split_nevra(nevra).name:

-     if name == nevra.rsplit('-', 2)[0]:

-         return True

-     else:

-         return False

- 

- def release_from_number(release_num):

-     """

-     Returns release string (Fedora XX) from number

-     Returns "Fedora Rawhide" if release number is equal to Rawhide number

-     """

-     releases = CACHE.get("fedora_releases")

-     if release_num.lower() == "rawhide" or release_num == releases["rawhide"]:

-         return "Fedora Rawhide"

-     if int(release_num) in app.config["EPEL_RELEASES"] or int(release_num) <= 6:

-         return "EPEL %s" % release_num

-     return "Fedora %s" % release_num

- 

- def release_from_nevra(nevra):

-     """

-     Returns Fedora XX or EPEL X from nevra

-     """

-     split = nevra.split(".")[-1]

-     if len(split) <= 1:

-         return "Unknown"

-     if "fedora-obsolete-packages" in nevra:

-         # fedora-obsolete-packages does not contain distag, workaround it

-         # eg. "fedora-obsolete-packages-32-51", release is "Fedora 32"

-         return release_from_dist(nevra.split("-")[3])

-     return release_from_dist(split)

- 

- def release_from_dist(dist):

-     """

-     Returns Fedora XX or EPEL X from dist (fcXX/elX or fXX/epelX)

-     """

-     releases = CACHE.get("fedora_releases")

-     dist = dist.replace("~bootstrap", "")

-     if "el" in dist or "epel" in dist.lower():

-         return "EPEL %s" % dist[-1:]

-     if int(dist[-2:]) == releases["rawhide"]:

-         return "Fedora Rawhide"

-     return "Fedora %s" % dist[-2:]

- 

- def release_is_active(release_string):

-     """

-     Gets release string (Fedora XX/EPEL X), returns False if the release is EOL, return True otherwise

-     """

-     releases = CACHE.get("fedora_releases")

-     if "Rawhide" in release_string:

-         return True

-     release_string = int(release_string.replace("Fedora ", "").replace("EPEL ", "")) # Get just the number

-     if release_string in releases["values"] or release_string in app.config["EPEL_RELEASES"]:

-         return True

-     return False

- 

- def process_update(update):

-     """

-     Cleans up single update dictionary to contain only data frontend needs

-     """

-     return {

-         "pretty_name": update["title"],

-         "updateid": update["alias"],

-         "submission_date": update["date_submitted"],

-         "stable_by_time": str(

-                 datetime.datetime.fromisoformat(update["date_modified"] or update["date_submitted"]) +

-                 datetime.timedelta(days=update["stable_days"])

-             ) if update["autotime"] else None,

-         "release": release_from_nevra(update["title"]),

-         "url": update["url"],

-         "status": update["status"],

-         "karma": update["karma"],

-         "comments": len(update["comments"])

-     }

- 

- 

- def process_override(override):

-     """

-     Cleans up single override dictionary to contain only data frontend needs

-     """

-     return {

-         "pretty_name": override["nvr"],

-         "url": "https://bodhi.fedoraproject.org/overrides/" + override["nvr"],

-         "submission_date": override["submission_date"],

-         "expiration_date": override["expiration_date"],

-         "release": release_from_nevra(override["nvr"])

-     }

- 

- class OrphanGraph(object):

- 

-     def __init__(self, orphans_data):

-         self.orphans_data = orphans_data

-         aff = orphans_data['affected_packages']

-         counter = 0

-         edges = []

-         self.pimap = {}

-         self.ipmap = {}

-         for i, p in enumerate(aff.keys()):

-             self.pimap[p] = i

-             self.ipmap[i] = p

-         self.ileafs = [self.pimap[p] for p in orphans_data['status_change'].keys()]

- 

-         for p in aff:

-             for d in aff[p]:

-                 edges.append((self.pimap[p],self.pimap[d]))

- 

-         self.g = igraph.Graph(directed=True)

-         self.g.add_vertices(len(self.pimap)+1)

-         self.g.add_edges(edges)

- 

- 

-     def get_package_info(self, package):

-         p = package

- 

-         if p not in self.orphans_data["affected_packages"]:

-             return {

-                 "orphaned": False,

-                 "depends_on_orphaned": False,

-                 "direct_dependencies": [],

-                 "remote_dependencies": [],

-                 "problematic_since": None,

-                 "dot_graph": "",

-                 "vis_js": {"nodes": [], "edges": []}

-             }

- 

-         paths = [_ for _ in self.g.get_shortest_paths(self.pimap[p], self.ileafs) if len(_) >= 2]

-         direct_deps = list(set([self.ipmap[l[-1]] for l in paths if len(l) == 2]))

-         remote_deps = list(set([self.ipmap[l[-1]] for l in paths if len(l) > 2]))

- 

-         try:

-             # the "Z"*20 hack is here to 'fool' the min operator into ignoring the package in question, when it's not in "status_change"

-             problematic_since = min([self.orphans_data["status_change"].get(d, "Z" * 20) for d in direct_deps + remote_deps + [p]])

-         except ValueError:

-             problematic_since = None

-         if problematic_since == "Z" * 20:

-             problematic_since = None

- 

-         dot_graph = ""

-         for path in paths:

-             dot_graph += ' -- '.join('"%s"' % self.ipmap[i] for i in path) + ";\n"

- 

-         _nodes = set()

-         _edges = set()

-         for path in paths:

-             _nodes.update(path)

-             for i in range(len(path)-1):

-                 _edges.add(tuple(path[i:i+2]))

- 

-         nodes = [{'id': n, 'label': self.ipmap[n]} for n in _nodes]

-         edges = [{'from': e[0], 'to': e[1]} for e in _edges]

- 

-         return {

-             "orphaned": p in self.orphans_data["status_change"].keys(),

-             "depends_on_orphaned": bool(len(self.orphans_data["affected_packages"][p])),

-             "direct_dependencies": direct_deps,

-             "remote_dependencies": remote_deps,

-             "problematic_since": problematic_since,

-             "dot_graph": dot_graph.strip(),

-             "vis_js": {'nodes': nodes, 'edges': edges},

-         }

- 

- def get_orphans(packages, orphans_data=None):

- 

-     if not orphans_data:

-         orphans_data = CACHE.get('orphans_json')

- 

-     graph = OrphanGraph(orphans_data)

-     orphans = {}

-     for package in packages:

-         orphans[package] = graph.get_package_info(package)

-     return orphans

- 

- 

- def get_json(json_url, attempt_retry=True, log_errors=True):

-     """

-     Returns json data from provided url

-     """

-     session = requests.Session()

-     retries = Retry(total=3,

-                     backoff_factor=0.1,

-                     status_forcelist=[429, 500, 502, 503, 504])

-     session.mount('https://', HTTPAdapter(max_retries=retries))

-     try:

-         resp = session.get(json_url)

-     except ConnectionError:

-         # Return none if retrying failed too

-         if log_errors:

-             app.logger.error("Request to %s failed after multiple retries." % json_url)

-         return None

-     if not attempt_retry:

-         try:

-             return json.loads(resp.text)

-         except JSONDecodeError:

-             if log_errors:

-                 app.logger.error("Request to %s failed because resource returned invalid data." % json_url)

-             return None

-     try:

-         return json.loads(resp.text)

-     except JSONDecodeError:

-         get_json(json_url, attempt_retry=False, log_errors=log_errors)

- 

- 

- def get_pagure_groups():

-     """

-     Returns dictionary mapping all packager groups in pagure, it's members and packages

-     "group_name" : {

-         "users": [user_a, ...],

-         "packages": [package_a, ...]

-     }

-     """

-     groups_users = {}

-     resp = get_json("https://src.fedoraproject.org/api/0/groups?per_page=100")  # TODO: Handle pagination properly

-     if not resp:

-         return None

-     allowed_groups = [re.compile(a) for a in app.config["ALLOWED_PACKAGER_GROUPS"]]

-     for group in resp["groups"]:

-         if not any(regex.match(group) for regex in allowed_groups):

-             continue

-         app.logger.debug("Checking out Pagure group %s" % group)

-         group_resp = get_json("https://src.fedoraproject.org/api/0/group/%s?projects=1&acl=commit" % group)

-         if not group_resp:

-             continue

-         try:

-             groups_users[group] = {

-                 "users": group_resp["members"],

-                 "packages": [project["name"] for project in group_resp["projects"]]

-             }

-         except TypeError:

-             app.logger.error("Skipped Pagure group %s because of an error" % group)

-             continue

-     return groups_users

- 

- 

- def get_user_group_packages(user, groups_map):

-     """

-     Returns list of packages user owns through a group

-     """

-     group_packages_dict = {}

-     group_packages_plain = set()

-     for group in groups_map:

-         if group == "packager":  # It seems every packager is in group "packager" and inherits zero packages through it

-             continue

-         if user in groups_map[group]["users"]:

-             group_packages_dict[group] = groups_map[group]["packages"]

-             for package in groups_map[group]["packages"]:

-                 group_packages_plain.add(package)

-     return group_packages_dict, group_packages_plain

- 

- 

- def get_packages(user, pkg_owners_map, groups_map):

-     """

-     Returns all packages owned by user (including those owned through a group)

-     returns dict of:       "combined" - list of all packages, owned both directly or through group

-                            "group" - dict (keys are group names) of packages owned only through group

-                            "primary" - list of packages owned only directly

-     """

-     # packages_owners_map['rpms']['some_package_name'] contains list of 'some_package_name' maintainers

-     if user == "orphan":  # blacklist orphan user which has lots of unnecessary packages and would choke up our servers

-         return {

-             "primary": [],

-             "group": {},

-             "combined": []

-         }

-     group_packages, group_packages_list = get_user_group_packages(user, groups_map)

-     primary_packages = {package for package in pkg_owners_map['rpms'] if user in pkg_owners_map['rpms'][package]}

-     if user in groups_map:

-         primary_packages = set(groups_map[user]["packages"]) # User is group, return group packages as primary packages

-     return {

-         "primary": list(primary_packages),

-         "group": group_packages,

-         "combined": list(primary_packages | group_packages_list)  # Join sets of primary and group packages

-     }

- 

- def get_package_versions(package):

-     versions = {}

-     releases = CACHE.get("fedora_releases")

-     for release in releases["values"][:-1]: # Skip Rawhide

-         versions[release_from_dist(str(release))] = {"stable": None, "testing": None}

-     # Append Rawhide in standardized naming format

-     versions["Fedora Rawhide"] = {"stable": None, "testing": None}

-     for release in app.config["EPEL_RELEASES"]:

-         versions[release_from_dist("el" + str(release))] = {"stable": None, "testing": None}

- 

-     bodhi_versions = get_json("https://src.fedoraproject.org/_dg/bodhi_updates/rpms/%s" % package)

-     if not bodhi_versions:

-         # Return dictionary we've prepared above containing empty fields for each release

-         # versions = {"Fedora XX": {"stable": None, "testing": None}, "Fedora Rawhide": {"stable": None, "testing": None}...}

-         return versions

-     for release in bodhi_versions["updates"]:

-         # Exclude ELN for now

-         if "ELN" in release:

-             continue

-         formatted_release = release_from_dist(release)

-         try:

-             versions[formatted_release]["stable"] = bodhi_versions["updates"][release]["stable"]

-         except KeyError:

-             pass

-         try:

-             versions[formatted_release]["testing"] = bodhi_versions["updates"][release]["testing"]

-         except KeyError:

-             pass

- 

-     # We need to query mdapi if we have None in Fedora stable values

-     for release in versions:

-         if "Fedora" in release and not versions[release]["stable"]:

-             if "Rawhide" in release:

-                 mdapi_response = get_json("https://mdapi.fedoraproject.org/%s/srcpkg/%s" % ("rawhide", package), False, False)

-             else:

-                 mdapi_response = get_json("https://mdapi.fedoraproject.org/%s/srcpkg/%s" % ("f" + release[-2:], package), False, False)

-             if not mdapi_response:

-                 continue

-             versions[release]["stable"] = package + "-" + mdapi_response["version"] + "-" + mdapi_response["release"]

- 

-     return versions

- 

- def get_updates(packages, raw_updates):

-     """

-     Gets list of user owned packages and libkarma dump of all updates

-     Returns dict of user owned packages and updates for them

-     package_name: {

-         "pretty_name": update["title"],

-         "updateid": update["alias"],

-         "submission_date": update["date_submitted"],

-         "stable_by_time": (update["date_submitted"] + update["stable_days"]) if update["autotime"] else None,

-         "release": release_from_nevra(update["title"]),

-         "url": update["url"],

-         "status": update["status"],

-         "karma": update["karma"],

-         "comments": len(update["comments"])

-     }

-     """

-     if len(packages) == 0:

-         return {}

-     data = {}

-     # Prepare dict items for each package

-     for package in packages:

-         data[package] = []

- 

-     # raw_updates["Fxx"][0...n]["builds"][0...n]["nvr"] = 'package_name-version-release'

-     # raw_updates["Fxx"][0...n]["alias"] = FEDORA-YYYY-UPDATE_ID

-     for release in raw_updates:

-         for package in packages:

-             present_updates = set()  # helper set to deduplicate updates containing more than one package

-             for update in raw_updates[release]:

-                 for build in update["builds"]:

-                     if name_in_nevra(package, build["nvr"]) and update["alias"] not in present_updates:

-                         data[package].append(process_update(update))

-                         present_updates.add(update["alias"])

- 

-     return data

- 

- def get_pr_ci_result(package, pr_id):

-     """

-     Return CI result for package pull-request

-     Returns {"ci-name": "result", "different-ci-name": "result",...} ,

-     "result" can be whatever a CI returns, there is no damned standard for it

-     "ci-name" can be anything, Zuul and simple-koji-ci will be the most common at the time of writing the code

-     """

-     resp_pr_ci_result = get_json("https://src.fedoraproject.org/api/0/rpms/%s/pull-request/%s/flag" % (package, pr_id))

-     data = {}

-     if not resp_pr_ci_result:

-         return {}

-     if "flags" not in resp_pr_ci_result:

-         return {}

-     for result in resp_pr_ci_result["flags"]:

-         # We care only about the newest results

-         if result["username"] not in data.keys():

-             data[result["username"]] = result["status"]

-     return data

- 

- 

- def get_package_prs(package):

-     """

-     Returns all open Pull Requests for a single package

-     """

-     data = []

-     resp_package_prs = get_json("https://src.fedoraproject.org/api/0/rpms/%s/pull-requests" % package)

-     if not resp_package_prs:

-         return None

-     # Filter out packages that, for some reason, do not exist in dist-git

-     if "error_code" in resp_package_prs and resp_package_prs["error_code"] == "ENOPROJECT":

-         return []

-     if not "requests" in resp_package_prs:

-         app.logger.error("Skipping PRs from package %s because Pagure returned invalid data" % package)

-         return []  # Return early if pagure sent invalid data

-     for request in resp_package_prs["requests"]:

-         if request["status"] == "Open":

-             data.append({

-                 "title": request["title"],

-                 "author": request["user"]['name'],

-                 "comments": len(request["comments"]),

-                 "date_created": str(datetime.datetime.fromtimestamp(int(request["date_created"]))),

-                 "last_updated": str(datetime.datetime.fromtimestamp(int(request["last_updated"]))),

-                 "ci_status": get_pr_ci_result(package, request["id"]),

-                 "release": "Fedora Rawhide" if request["branch"] == "master" else release_from_dist(request["branch"]),

-                 "url": "https://src.fedoraproject.org/rpms/%s/pull-request/%s" % (package, request["id"])

-             })

-     return data

- 

- def get_blocker_trackers():

-     tracker_ids = []

-     releases = CACHE.get('fedora_releases')

-     if releases["branched"]:

-         try:

-             tracker_ids.append(BUGZILLA.getbug("F%sBetaBlocker" % releases["branched"]).id)

-             tracker_ids.append(BUGZILLA.getbug("F%sFinalBlocker" % releases["branched"]).id)

-         except IndexError:

-             pass

-     try:

-         tracker_ids.append(BUGZILLA.getbug("F%sBetaBlocker" % releases["rawhide"]).id)

-         tracker_ids.append(BUGZILLA.getbug("F%sFinalBlocker" % releases["rawhide"]).id)

-     except IndexError:

-         pass

-     return tracker_ids

- 

- def get_fti_trackers():

-     tracker_ids = []

-     releases = CACHE.get('fedora_releases')["values"]

-     for release in releases:

-         try:

-             tracker_ids.append(BUGZILLA.getbug("F%sFailsToInstall" % release).id)

-         except IndexError:

-             continue

-     return tracker_ids

- 

- 

- def get_ftbfs_trackers():

-     tracker_ids = []

-     releases = CACHE.get('fedora_releases')["values"]

-     for release in releases:

-         try:

-             tracker_ids.append(BUGZILLA.getbug("F%sFTBFS" % release).id)

-         except IndexError:

-             continue

-     return tracker_ids

- 

- 

- def get_package_bugs(package):

-     """

-     Returns all open Bugs for a single package

-     """

-     data = []

-     query = BUGZILLA.url_to_query("https://bugzilla.redhat.com/buglist.cgi?bug_status=NEW&bug_status=__open__&"

-                                   "classification=Fedora&product=Fedora&product=Fedora EPEL&component=%s" % urllib.parse.quote(package))

-     query["include_fields"] = ['blocks', 'comments', 'creation_time', 'creator', 'id', 'keywords', 'last_change_time',

-                                'severity', 'priority', 'status', 'summary', 'version', 'whiteboard']

-     bugs = BUGZILLA.query(query)

-     if len(bugs) == 0:

-         return []

-     ftbfs_trackers = set(CACHE.get('ftbfs_trackers'))

-     fti_trackers = set(CACHE.get('fti_trackers'))

-     blocker_trackers = set(CACHE.get('blocker_trackers'))

-     for bug in bugs:

-         if bug.creator == 'Upstream Release Monitoring':

-             bug.keywords.append("ReleaseMonitoring")

-         for blocks in bug.blocks:

-             if blocks in ftbfs_trackers and "FTBFS" not in bug.keywords:

-                 bug.keywords.append("FTBFS")

-             if blocks in fti_trackers and "FTI" not in bug.keywords:

-                 bug.keywords.append("FTI")

-             if blocks in blocker_trackers and "AcceptedBlocker" in bug.whiteboard and "AcceptedBlocker" not in bug.keywords:

-                 bug.keywords.append("AcceptedBlocker")

-             if blocks in blocker_trackers and "AcceptedBlocker" not in bug.whiteboard and "ProposedBlocker" not in bug.keywords:

-                 bug.keywords.append("ProposedBlocker")

-         strs = ['unspecified', 'low', 'medium', 'high', 'urgent']

-         release = release_from_number(bug.version.replace("epel", "").replace("el", ""))

-         if "[fedora-all]" in bug.summary:

-             release = "Fedora"

-         if "[epel-all]" in bug.summary:

-             release = "EPEL"

-         data.append({

-             "title": bug.summary,

-             "bug_id": bug.id,

-             "severity": bug.severity,

-             "priority": bug.priority,

-             "priority_severity": strs[max(strs.index(bug.priority), strs.index(bug.severity))],

-             "status": bug.status,

-             # moment.js on frontend requires RFC2822 datetime format, bugzilla returns RFC339

-             "modified": str(bug.last_change_time).replace("T", " ").replace("Z", ""),

-             "reported": str(bug.creation_time).replace("T", " ").replace("Z", ""),

-             "release": release,

-             "keywords": bug.keywords,

-             "comments": (len(bug.comments) - 1),  # Let's ignore the first comment that every bug has

-             "url": "https://bugzilla.redhat.com/%s" % bug.id

-         })

-     return data

- 

- 

- def get_overrides():

-     """

-     Returns list of all active buildroot overrides

-     """

-     bc = BodhiClient(username="oraculum",

-                      useragent="Fedora Easy Karma/GIT",

-                      retries=3)

- 

-     query_args = {"expired": False,

-                   "rows_per_page": 50,

-                   }

- 

-     overrides = []

-     try:

-         # since bodhi has a query limit but multiple pages, get ALL of the

-         # updates before starting to process

-         result = bc.list_overrides(**query_args)

-         overrides.extend(result['overrides'])

-         while result.page < result.pages:

-             next_page = result['page'] + 1

-             app.logger.debug("Fetching overrides page {} of {}".format(

-                 next_page, result['pages']))

-             result = bc.list_overrides(page=next_page, **query_args)

-             overrides.extend(result['overrides'])

-     # There is no clear indication which Exceptions bc.query() might

-     # throw, therefore catch all (python-fedora-0.3.32.3-1.fc19)

-     except Exception as e:

-         app.logger.error("Error while querying Bodhi: {0}".format(e))

- 

-     return overrides

- 

- 

- def get_user_overrides(packages, raw_overrides):

-     """

-     Returns all active buildroot overrides for single user

-     Formatted as dict of dicts:

-     package_name: {

-         "pretty_name": override["nvr"],

-         "url": "https://bodhi.fedoraproject.org/overrides/" + override["nvr"],

-         "submission_date": override["submission_date"],

-         "expiration_date": override["expiration_date"],

-         "release": release_from_nevra(override["nvr"])

-     }

-     """

-     data = {}

-     for package in packages:

-         data[package] = []

-     for override in raw_overrides:

-         for package in packages:

-             if name_in_nevra(package, override["nvr"]):

-                 data[package].append(process_override(override))

-     return data

- 

- def process_koji_queue(session, data):

-     koji_results = session.multiCall()

-     for element in koji_results:

-         try:

-             # koji multicall returns results packed inside lists of one element, redefine element for better readability later

-             element = element[0][0]

-         except IndexError:

-             # Sometimes, koji returns incomplete results

-             continue

-         for entry in data[element["name"]]:

-             """

-             We need to iterate throughout all members of data["package_name"] to find and alter only the architecture we

-             now have in the element.

-             Example of element (only keys we use are listed below):

-                {'tag_name': 'fXX',

-                 'build_id': XYZ,

-                 'completion_time': 'YYYY-MM-DD HH:MM:SS.FFFFFF',

-                 'name': 'package_name',

-                 ...}

-             """

-             # Since dict doesn't guarantee order, we must pair koji result and data entry

-             if release_from_dist(element["tag_name"]) == entry["release"]:

-                 data[element["name"]][data[element["name"]].index(entry)]["last_success"] = {

-                     "time": element["completion_time"],

-                     "url": app.config["KOJI_URL"] + "buildinfo?buildID=" + str(element["build_id"])}

-                 # And we can jump to another element early if we found what we needed, yay!

-                 continue

-     return data

- 

- def parse_koschei_data():

-     """

-     Prepares data from koschei for easier parsing in get_user_koschei_data()

-     Returns dict of lists containing dicts: {

-         "package_name": [

-             {"release": "fedora_release", "status": "koschei status", "url": "https://koschei.fedoraproject.org/package/..."}

-         ]

-     }

-     """

-     data = defaultdict(list)

-     koschei_resp = get_json(app.config['KOSCHEI_API_URL'])

-     # Set up koji API

-     koji_session = koji.ClientSession(app.config['KOJIHUB_URL'])

-     koji_session.multicall = True

- 

-     for item in koschei_resp:

-         if "playground" in item["collection"]:

-             # We don't care about EPEL 8 Playground

-             continue

-         if not release_is_active(release_from_dist(item["collection"])):

-             # koschei might contain data for EOL Fedora releases, we don't care about those

-             continue

-         data[item["name"]].append({

-             "release": release_from_dist(item["collection"]),

-             "status": item["state"],

-             "url": "https://koschei.fedoraproject.org/package/%s?collection=%s" % (item["name"], item["collection"]),

-             "last_success": {"time": None, "url": None}})

-         if item["state"] == "failing":

-             koji_session.getLatestBuilds(item["collection"], package=item["name"])

- 

-     return process_koji_queue(koji_session, data)

- 

- 

- def get_user_koschei_data(packages):

-     """

-     Filters out koschei data to contain information only about given pacakges list

-     """

-     koschei = CACHE.get("koschei_data")

-     data = {}

-     for package in packages:

-         data[package] = koschei.get(package, [])

-     return data

- 

- def process_health_check_json(json_data):

-     """

-     Cleans up data for further health check processing into following format:

-     {"package": {"arch": ["reason_a", "reason_b"], "arch_b": ["reason_a, reason_c"]}, "package_b"...}

-     """

-     data = defaultdict(dict)

-     for single_closure in json_data:

-         data[single_closure["package"]][single_closure["arch"]] = {

-             "reason": single_closure["broken"],

-             "since": single_closure["since"]

-         }

-     return data

- 

- def get_health_check_data():

-     """

-     Retrieves json files containing data from fedora-health-check and combines them into dictionary with per-release keys

-     Returns dict {

-         "FXX":         {"package": {"arch": ["reason_a", "reason_b"], "arch_b": ["reason_a, reason_c"]}, "package_b"...}

-         "FXX-testing": {"package": {"arch": ["reason_a", "reason_b"], "arch_b": ["reason_a, reason_c"]}, "package_b"...}

-         #Rawhide releases don't have -testing counterpart

-         "rawhide":     {"package": {"arch": ["reason_a", "reason_b"], "arch_b": ["reason_a, reason_c"]}, "package_b"...}

-     }

-     Reason format is described below in get_health_check_user_data

-     """

-     data = {}

-     releases = CACHE.get('fedora_releases')["values"]

-     for release in releases[:-1]: # Process every release but rawhide

-         release = str(release)

-         # Remote resource might not have data for some Fedora Release, typically branched right after branching point

-         try:

-             data[release] = process_health_check_json(get_json(app.config["HEALTH_CHECK_URL"] + release))

-             data[release + "-testing"] = process_health_check_json(get_json(app.config["HEALTH_CHECK_URL"] + release + "-testing"))

-         except TypeError:

-             pass

-     try:

-         data["rawhide"] = process_health_check_json(get_json(app.config["HEALTH_CHECK_URL"] + "rawhide"))

-     except TypeError:

-         pass

-     return data

- 

- def get_repo_for_health_check(release):

-     """

-     Used to give frontend a hint if a failure comes from stable, testing or rawhide repos

-     """

-     if "-testing" in release:

-         return "testing"

-     if "rawhide" in release:

-         return "rawhide"

-     return "stable"

- 

- def get_health_check_user_data(packages):

-     """

-     Parses data from get_health_check_data and returns dict for package list:

-     {

-         "package_a":  {"fails_to_install": True/False, "releases": [{"release": XX, "problems": problems}]

-         ...

-     }

-     "problems": {

-         "arch_a": {"reason": ["package_a",...], "since": "2020-07-17T07:47:25.219142397Z"}

-         "arch_b": {"reason": ["package_a",...], "since": "2040-07-17T07:47:25.219142397Z"}

-         ...

-     }

-     """

-     fti_data = CACHE.get('health_check_data')

-     data = {}

-     for package in packages:

-         data[package] = []

-         for release in fti_data:

-             if package in fti_data[release]:

-                 data[package].append({"release": release_from_number(release.replace("-testing", "")), # We need to cut -testing,

-                                       "repo": get_repo_for_health_check(release),

-                                       "problems": fti_data[release][package]})

-     return data

- 

- 

- if __name__ == "__main__":

-     def test_orphans():

-         """

-           * notes packages being orphaned

-           A -- B -- C*

-                  -- D* -- E*

-                  -- E*

-             -- F*

-         """

-         DATA = {

-                 "affected_packages": {

-                     'A': ['B', 'F'],

-                     'B': ['C', 'D', 'E'],

-                     'C': [],

-                     'D': ['E'],

-                     'E': [],

-                     'F': []

-                 },

-                 "status_change": {

-                     'C': '2020-01-01T00:00:00',

-                     'D': '2020-01-01T00:00:01',

-                     'E': '2020-01-01T00:00:02',

-                     'F': '2020-01-01T00:00:03',

- 

-                 }

-         }

-         out = get_orphans(["A", "F", "Z"], DATA)

-         # Not affected

-         o_z = {

-             "orphaned": False,

-             "depends_on_orphaned": False,

-             "direct_dependencies": [],

-             "remote_dependencies": [],

-             "problematic_since": None,

-             "dot_graph": "",

-             "vis_js": {"nodes": [], "edges": []}

-             }

-         assert out["Z"] == o_z

- 

-         # Directly orphaned

-         o_f = {

-             "orphaned": True,

-             "depends_on_orphaned": False,

-             "direct_dependencies": [],

-             "remote_dependencies": [],

-             "problematic_since": '2020-01-01T00:00:03',

-             "dot_graph": "",

-             "vis_js": {"nodes": [], "edges": []}

-             }

-         assert out["F"] == o_f

- 

-         # Complete example

-         o_a = {

-             "orphaned": False,

-             "depends_on_orphaned": True,

-             "direct_dependencies": ['F'],

-             "remote_dependencies": ['C', 'D', 'E'],

-             "problematic_since": '2020-01-01T00:00:00'

-             }

-         dg = sorted(out["A"]["dot_graph"].split('\n'))

-         vis_js = out["A"]["vis_js"]

-         del(out["A"]["dot_graph"])

-         del(out["A"]["vis_js"])

- 

-         out["A"]["remote_dependencies"].sort()

-         assert out["A"] == o_a

- 

-         o_dg = sorted([

-             '"A" -- "B" -- "C";',

-             '"A" -- "B" -- "D";',

-             '"A" -- "B" -- "E";',

-             '"A" -- "F";'

-             ])

-         assert o_dg == dg

- 

-         assert len(vis_js.keys()) == 2

-         assert 'nodes' in vis_js.keys()

-         assert 'edges' in vis_js.keys()

- 

-         nodes = sorted([(n['id'], n['label']) for n in vis_js['nodes']])

-         assert nodes == [(0, 'A'), (1, 'B'), (2, 'C'), (3, 'D'), (4, 'E'), (5, 'F')]

- 

-         edges = sorted([(e['from'], e['to']) for e in vis_js['edges']])

-         assert edges == [(0, 1), (0, 5), (1, 2), (1, 3), (1, 4)]

- 

-         print("OK")

- 

- 

-     test_orphans()

- 

file added
+158
@@ -0,0 +1,158 @@ 

+ #

+ # dashboard_helpers.py - Various utilities for packager dashboard

+ #

+ # Copyright 2020, Red Hat, Inc

+ #

+ # This program is free software; you can redistribute it and/or modify

+ # it under the terms of the GNU General Public License as published by

+ # the Free Software Foundation; either version 2 of the License, or

+ # (at your option) any later version.

+ #

+ # This program is distributed in the hope that it will be useful,

+ # but WITHOUT ANY WARRANTY; without even the implied warranty of

+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

+ # GNU General Public License for more details.

+ #

+ # You should have received a copy of the GNU General Public License along

+ # with this program; if not, write to the Free Software Foundation, Inc.,

+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

+ #

+ # Authors:

+ #   Frantisek Zatloukal <fzatlouk@redhat.com>

+ #   Josef Skladanka <jskladan@redhat.com>

+ 

+ import json

+ import requests

+ import datetime

+ import fedfind.helpers

+ 

+ from urllib3.util.retry import Retry

+ from requests.adapters import HTTPAdapter

+ from requests.exceptions import ConnectionError

+ from json import JSONDecodeError

+ 

+ from oraculum import app, CACHE, db

+ from oraculum.models.dashboard_users import DashboardUserData

+ 

+ 

+ def get_json(json_url, attempt_retry=True, log_errors=True):

+     """

+     Returns json data from provided url

+     """

+     session = requests.Session()

+     retries = Retry(total=3,

+                     backoff_factor=0.1,

+                     status_forcelist=[429, 500, 502, 503, 504])

+     session.mount('https://', HTTPAdapter(max_retries=retries))

+     try:

+         resp = session.get(json_url)

+     except ConnectionError:

+         # Return None if retrying failed too

+         if log_errors:

+             app.logger.error("Request to %s failed after multiple retries." % json_url)

+         return None

+     if not attempt_retry:

+         try:

+             return json.loads(resp.text)

+         except JSONDecodeError:

+             if log_errors:

+                 app.logger.error("Request to %s failed because resource returned invalid data." % json_url)

+             return None

+     try:

+         return json.loads(resp.text)

+     except JSONDecodeError:

+         return get_json(json_url, attempt_retry=False, log_errors=log_errors)  # retry once and propagate the result

+ 

+ 

+ def get_fedora_releases():

+     """

+     Returns a dict mapping release roles (oldstable, stable, branched, rawhide) to active Fedora release numbers.

+     "values" key contains list of active Fedora release numbers

+     """

+     releases = {"oldstable": (max(fedfind.helpers.get_current_stables()) - 1),

+                 "stable": max(fedfind.helpers.get_current_stables()),

+                 "branched": fedfind.helpers.get_current_release(branched=True),

+                 "rawhide": (fedfind.helpers.get_current_release(branched=True) + 1)}

+ 

+     releases["values"] = sorted(set(releases.values()))

+     if releases["stable"] == releases["branched"]:

+         releases["branched"] = None

+     return releases

+ 

+ 

+ def update_user_access_time(user):

+     """

+     Updates user last_accessed with current timestamp

+     """

+     row = DashboardUserData.query.filter_by(username=user).first()

+     if not row:

+         row = DashboardUserData(user, datetime.datetime.utcnow())

+         db.session.add(row)

+     else:

+         row.last_accessed = datetime.datetime.utcnow()

+     db.session.commit()

+ 

+ 

+ def name_in_nevra(name, nevra):

+     """

+     Checks if name/nevra matches

+     """

+     #!!! - hawkey.split_nevra leaks memory, so we have to do this shit instead... FUN!

+     #if name == hawkey.split_nevra(nevra).name:

+     if name == nevra.rsplit('-', 2)[0]:

+         return True

+     else:

+         return False

+ 

+ 

+ def release_from_number(release_num):

+     """

+     Returns release string (Fedora XX or EPEL X) from number

+     Returns "Fedora Rawhide" if release number is equal to Rawhide number

+     """

+     releases = CACHE.get("fedora_releases")

+     if release_num.lower() == "rawhide" or release_num == releases["rawhide"]:

+         return "Fedora Rawhide"

+     if int(release_num) in app.config["EPEL_RELEASES"] or int(release_num) <= 6:

+         return "EPEL %s" % release_num

+     return "Fedora %s" % release_num

+ 

+ 

+ def release_from_nevra(nevra):

+     """

+     Returns Fedora XX or EPEL X from nevra

+     """

+     split = nevra.split(".")[-1]

+     if len(split) <= 1:

+         return "Unknown"

+     if "fedora-obsolete-packages" in nevra:

+         # fedora-obsolete-packages does not contain a dist tag, work around it

+         # eg. "fedora-obsolete-packages-32-51", release is "Fedora 32"

+         return release_from_dist(nevra.split("-")[3])

+     return release_from_dist(split)

+ 

+ 

+ def release_from_dist(dist):

+     """

+     Returns Fedora XX or EPEL X from dist (fcXX/elX or fXX/epelX)

+     """

+     releases = CACHE.get("fedora_releases")

+     dist = dist.replace("~bootstrap", "")

+     if "el" in dist or "epel" in dist.lower():

+         return "EPEL %s" % dist[-1:]

+     if int(dist[-2:]) == releases["rawhide"]:

+         return "Fedora Rawhide"

+     return "Fedora %s" % dist[-2:]

+ 

+ 

+ def release_is_active(release_string):

+     """

+     Takes a release string (Fedora XX/EPEL X) and returns False if the release is EOL, True otherwise

+     """

+     releases = CACHE.get("fedora_releases")

+     if "Rawhide" in release_string:

+         return True

+     release_string = int(release_string.replace("Fedora ", "").replace("EPEL ", "")) # Get just the number

+     if release_string in releases["values"] or release_string in app.config["EPEL_RELEASES"]:

+         return True

+     return False
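
For orientation, a minimal standalone sketch of what the nevra/dist helpers above boil down to; it is illustrative only (not part of the change) and hard-codes a made-up Rawhide number instead of reading it from CACHE:

```python
# Illustrative sketch only -- mirrors the parsing done by name_in_nevra() and
# release_from_dist() above, with a hard-coded Rawhide number instead of CACHE.
RAWHIDE = 35  # assumption for the example; the real code reads it from the cache

def release_from_dist_sketch(dist):
    dist = dist.replace("~bootstrap", "")
    if "el" in dist or "epel" in dist.lower():
        return "EPEL %s" % dist[-1:]
    if int(dist[-2:]) == RAWHIDE:
        return "Fedora Rawhide"
    return "Fedora %s" % dist[-2:]

print(release_from_dist_sketch("fc33"))  # Fedora 33
print(release_from_dist_sketch("el8"))   # EPEL 8
print(release_from_dist_sketch("fc35"))  # Fedora Rawhide
# name_in_nevra() boils down to a right-split on "-":
print("python-requests-2.25.1-1.fc33".rsplit("-", 2)[0])  # python-requests
```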

@@ -0,0 +1,106 @@ 

+ #

+ # health_check.py - Health Check parsers and utilities for packager dashboard

+ #

+ # Copyright 2020, Red Hat, Inc

+ #

+ # This program is free software; you can redistribute it and/or modify

+ # it under the terms of the GNU General Public License as published by

+ # the Free Software Foundation; either version 2 of the License, or

+ # (at your option) any later version.

+ #

+ # This program is distributed in the hope that it will be useful,

+ # but WITHOUT ANY WARRANTY; without even the implied warranty of

+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

+ # GNU General Public License for more details.

+ #

+ # You should have received a copy of the GNU General Public License along

+ # with this program; if not, write to the Free Software Foundation, Inc.,

+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

+ #

+ # Authors:

+ #   Frantisek Zatloukal <fzatlouk@redhat.com>

+ #   Josef Skladanka <jskladan@redhat.com>

+ 

+ from collections import defaultdict

+ 

+ from oraculum import app, CACHE

+ 

+ from oraculum.utils.dashboard_helpers import get_json, release_from_number

+ 

+ 

+ def process_health_check_json(json_data):

+     """

+     Cleans up data for further health check processing into following format:

+     {"package": {"arch": ["reason_a", "reason_b"], "arch_b": ["reason_a, reason_c"]}, "package_b"...}

+     """

+     data = defaultdict(dict)

+     for single_closure in json_data:

+         data[single_closure["package"]][single_closure["arch"]] = {

+             "reason": single_closure["broken"],

+             "since": single_closure["since"]

+         }

+     return data

+ 

+ 

+ def get_health_check_data():

+     """

+     Retrieves json files containing data from fedora-health-check and combines them into dictionary with per-release keys

+     Returns dict {

+         "FXX":         {"package": {"arch": ["reason_a", "reason_b"], "arch_b": ["reason_a, reason_c"]}, "package_b"...}

+         "FXX-testing": {"package": {"arch": ["reason_a", "reason_b"], "arch_b": ["reason_a, reason_c"]}, "package_b"...}

+         #Rawhide releases don't have -testing counterpart

+         "rawhide":     {"package": {"arch": ["reason_a", "reason_b"], "arch_b": ["reason_a, reason_c"]}, "package_b"...}

+     }

+     Reason format is described below in get_health_check_user_data

+     """

+     data = {}

+     releases = CACHE.get('fedora_releases')["values"]

+     for release in releases[:-1]: # Process every release but rawhide

+         release = str(release)

+         # The remote resource might not have data for some Fedora release, typically a branched release right after the branching point

+         try:

+             data[release] = process_health_check_json(get_json(app.config["HEALTH_CHECK_URL"] + release))

+             data[release + "-testing"] = process_health_check_json(get_json(app.config["HEALTH_CHECK_URL"] + release + "-testing"))

+         except TypeError:

+             pass

+     try:

+         data["rawhide"] = process_health_check_json(get_json(app.config["HEALTH_CHECK_URL"] + "rawhide"))

+     except TypeError:

+         pass

+     return data

+ 

+ 

+ def get_repo_for_health_check(release):

+     """

+     Used to give frontend a hint if a failure comes from stable, testing or rawhide repos

+     """

+     if "-testing" in release:

+         return "testing"

+     if "rawhide" in release:

+         return "rawhide"

+     return "stable"

+ 

+ 

+ def get_health_check_user_data(packages):

+     """

+     Parses data from get_health_check_data and returns a dict for the given package list:

+     {

+         "package_a":  {"fails_to_install": True/False, "releases": [{"release": XX, "problems": problems}]

+         ...

+     }

+     "problems": {

+         "arch_a": {"reason": ["package_a",...], "since": "2020-07-17T07:47:25.219142397Z"}

+         "arch_b": {"reason": ["package_a",...], "since": "2040-07-17T07:47:25.219142397Z"}

+         ...

+     }

+     """

+     fti_data = CACHE.get('health_check_data')

+     data = {}

+     for package in packages:

+         data[package] = []

+         for release in fti_data:

+             if package in fti_data[release]:

+                 data[package].append({"release": release_from_number(release.replace("-testing", "")), # We need to cut -testing,

+                                       "repo": get_repo_for_health_check(release),

+                                       "problems": fti_data[release][package]})

+     return data
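
A minimal illustrative sketch (not part of the change) of the reshaping process_health_check_json() performs; the payload fields mirror the ones used above, the values are made up:

```python
# Illustrative sketch only -- shows the reshaping process_health_check_json()
# performs on a fedora-health-check style payload.
from collections import defaultdict

sample = [
    {"package": "foo", "arch": "x86_64",  "broken": ["libbar"], "since": "2020-07-17T07:47:25Z"},
    {"package": "foo", "arch": "armv7hl", "broken": ["libbaz"], "since": "2020-07-17T07:47:25Z"},
]

data = defaultdict(dict)
for closure in sample:
    data[closure["package"]][closure["arch"]] = {"reason": closure["broken"],
                                                 "since": closure["since"]}

print(dict(data))
# {'foo': {'x86_64': {'reason': ['libbar'], 'since': '...'},
#          'armv7hl': {'reason': ['libbaz'], 'since': '...'}}}
```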

@@ -0,0 +1,103 @@ 

+ #

+ # koschei.py - Koschei parsers and utilities for packager dashboard

+ #

+ # Copyright 2020, Red Hat, Inc

+ #

+ # This program is free software; you can redistribute it and/or modify

+ # it under the terms of the GNU General Public License as published by

+ # the Free Software Foundation; either version 2 of the License, or

+ # (at your option) any later version.

+ #

+ # This program is distributed in the hope that it will be useful,

+ # but WITHOUT ANY WARRANTY; without even the implied warranty of

+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

+ # GNU General Public License for more details.

+ #

+ # You should have received a copy of the GNU General Public License along

+ # with this program; if not, write to the Free Software Foundation, Inc.,

+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

+ #

+ # Authors:

+ #   Frantisek Zatloukal <fzatlouk@redhat.com>

+ #   Josef Skladanka <jskladan@redhat.com>

+ 

+ from collections import defaultdict

+ 

+ import koji

+ 

+ from oraculum import app, CACHE

+ from oraculum.utils.dashboard_helpers import get_json, release_from_dist, release_is_active

+ 

+ 

+ def process_koji_queue(session, data):

+     koji_results = session.multiCall()

+     for element in koji_results:

+         try:

+             # koji multicall returns results packed inside single-element lists; redefine element for better readability later

+             element = element[0][0]

+         except IndexError:

+             # Sometimes, koji returns incomplete results

+             continue

+         for entry in data[element["name"]]:

+             """

+             We need to iterate throughout all members of data["package_name"] to find and alter only the architecture we

+             now have in the element.

+             Example of element (only keys we use are listed below):

+                {'tag_name': 'fXX',

+                 'build_id': XYZ,

+                 'completion_time': 'YYYY-MM-DD HH:MM:SS.FFFFFF',

+                 'name': 'package_name',

+                 ...}

+             """

+             # Koji results aren't guaranteed to match the order of our entries, so pair each result with its entry by release

+             if release_from_dist(element["tag_name"]) == entry["release"]:

+                 data[element["name"]][data[element["name"]].index(entry)]["last_success"] = {

+                     "time": element["completion_time"],

+                     "url": app.config["KOJI_URL"] + "buildinfo?buildID=" + str(element["build_id"])}

+                 # And we can jump to another element early if we found what we needed, yay!

+                 break

+     return data

+ 

+ 

+ def parse_koschei_data():

+     """

+     Prepares data from koschei for easier parsing in get_user_koschei_data()

+     Returns dict of lists containing dicts: {

+         "package_name": [

+             {"release": "fedora_release", "status": "koschei status", "url": "https://koschei.fedoraproject.org/package/..."}

+         ]

+     }

+     """

+     data = defaultdict(list)

+     koschei_resp = get_json(app.config['KOSCHEI_API_URL'])

+     # Set up koji API

+     koji_session = koji.ClientSession(app.config['KOJIHUB_URL'])

+     koji_session.multicall = True

+ 

+     for item in koschei_resp:

+         if "playground" in item["collection"]:

+             # We don't care about EPEL 8 Playground

+             continue

+         if not release_is_active(release_from_dist(item["collection"])):

+             # koschei might contain data for EOL Fedora releases, we don't care about those

+             continue

+         data[item["name"]].append({

+             "release": release_from_dist(item["collection"]),

+             "status": item["state"],

+             "url": "https://koschei.fedoraproject.org/package/%s?collection=%s" % (item["name"], item["collection"]),

+             "last_success": {"time": None, "url": None}})

+         if item["state"] == "failing":

+             koji_session.getLatestBuilds(item["collection"], package=item["name"])

+ 

+     return process_koji_queue(koji_session, data)

+ 

+ 

+ def get_user_koschei_data(packages):

+     """

+     Filters koschei data down to information about the given package list

+     """

+     koschei = CACHE.get("koschei_data")

+     data = {}

+     for package in packages:

+         data[package] = koschei.get(package, [])

+     return data
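
A minimal illustrative sketch (not part of the change) of the cached koschei_data shape and the filtering get_user_koschei_data() applies; all values are made up:

```python
# Illustrative sketch only -- the shape of one cached "koschei_data" entry and the
# per-package filtering done by get_user_koschei_data().
cached = {
    "foo": [{"release": "Fedora 33",
             "status": "failing",
             "url": "https://koschei.fedoraproject.org/package/foo?collection=f33",
             "last_success": {"time": None, "url": None}}],
}

packages = ["foo", "bar"]
user_view = {p: cached.get(p, []) for p in packages}
print(user_view["bar"])  # [] -- packages koschei doesn't know about map to an empty list
```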

@@ -1,55 +0,0 @@ 

- import logging

- 

- from bodhi.client.bindings import BodhiClient

- 

- 

- def query_bodhi(bodhi_client, release, pending=False, logger=None):

-     """Deal with querying bodhi and combining all relevant pages into a

-     single list of updates."""

-     if not logger:

-         logger = logging.getLogger(__name__)

- 

-     query_args = {"release": str(release),

-                   "rows_per_page": 50,

-                   }

-     if pending:

-         query_args["request"] = "testing"

-         query_args["status"] = "pending"

-     else:

-         query_args["status"] = "testing"

- 

-     updates = []

-     try:

-         # since bodhi has a query limit but multiple pages, get ALL of the

-         # updates before starting to process

-         result = bodhi_client.query(**query_args)

-         updates.extend(result['updates'])

-         while result.page < result.pages:

-             next_page = result['page'] + 1

-             logger.debug("Fetching updates page {} of {}".format(

-                 next_page, result['pages']))

-             result = bodhi_client.query(page=next_page, **query_args)

-             updates.extend(result['updates'])

-     # There is no clear indication which Exceptions bc.query() might

-     # throw, therefore catch all (python-fedora-0.3.32.3-1.fc19)

-     except Exception as e:

-         logger.debug("Error while querying Bodhi: {0}".format(e))

- 

-     return updates

- 

- 

- def get_updates(release, logger=None):

-     bc = BodhiClient(username="oraculum",

-                      useragent="Fedora Easy Karma/GIT",

-                      retries=3)

- 

-     updates = query_bodhi(bc, release, False, logger)

-     updates_pending = query_bodhi(bc, release, True, logger)

-     updates.extend(updates_pending)

- 

-     return updates

- 

- 

- if __name__ == "__main__":

-     import ipdb

-     ipdb.set_trace()

@@ -0,0 +1,201 @@ 

+ #

+ # orphans.py - Orphans parsers and utilities for packager dashboard

+ #

+ # Copyright 2020, Red Hat, Inc

+ #

+ # This program is free software; you can redistribute it and/or modify

+ # it under the terms of the GNU General Public License as published by

+ # the Free Software Foundation; either version 2 of the License, or

+ # (at your option) any later version.

+ #

+ # This program is distributed in the hope that it will be useful,

+ # but WITHOUT ANY WARRANTY; without even the implied warranty of

+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

+ # GNU General Public License for more details.

+ #

+ # You should have received a copy of the GNU General Public License along

+ # with this program; if not, write to the Free Software Foundation, Inc.,

+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

+ #

+ # Authors:

+ #   Frantisek Zatloukal <fzatlouk@redhat.com>

+ #   Josef Skladanka <jskladan@redhat.com>

+ 

+ import igraph

+ 

+ from oraculum import CACHE

+ 

+ 

+ class OrphanGraph(object):

+ 

+     def __init__(self, orphans_data):

+         self.orphans_data = orphans_data

+         aff = orphans_data['affected_packages']

+         edges = []

+         self.pimap = {}

+         self.ipmap = {}

+         for i, p in enumerate(aff.keys()):

+             self.pimap[p] = i

+             self.ipmap[i] = p

+         self.ileafs = [self.pimap[p] for p in orphans_data['status_change'].keys()]

+ 

+         for p in aff:

+             for d in aff[p]:

+                 edges.append((self.pimap[p], self.pimap[d]))

+ 

+         self.g = igraph.Graph(directed=True)

+         self.g.add_vertices(len(self.pimap)+1)

+         self.g.add_edges(edges)

+ 

+     def get_package_info(self, package):

+         p = package

+ 

+         if p not in self.orphans_data["affected_packages"]:

+             return {

+                 "orphaned": False,

+                 "depends_on_orphaned": False,

+                 "direct_dependencies": [],

+                 "remote_dependencies": [],

+                 "problematic_since": None,

+                 "dot_graph": "",

+                 "vis_js": {"nodes": [], "edges": []}

+             }

+ 

+         paths = [_ for _ in self.g.get_shortest_paths(self.pimap[p], self.ileafs) if len(_) >= 2]

+         direct_deps = list(set([self.ipmap[l[-1]] for l in paths if len(l) == 2]))

+         remote_deps = list(set([self.ipmap[l[-1]] for l in paths if len(l) > 2]))

+ 

+         try:

+             # the "Z"*20 hack is here to 'fool' the min operator into ignoring the package in question, when it's not in "status_change"

+             problematic_since = min([self.orphans_data["status_change"].get(d, "Z" * 20) for d in direct_deps + remote_deps + [p]])

+         except ValueError:

+             problematic_since = None

+         if problematic_since == "Z" * 20:

+             problematic_since = None

+ 

+         dot_graph = ""

+         for path in paths:

+             dot_graph += ' -- '.join('"%s"' % self.ipmap[i] for i in path) + ";\n"

+ 

+         _nodes = set()

+         _edges = set()

+         for path in paths:

+             _nodes.update(path)

+             for i in range(len(path)-1):

+                 _edges.add(tuple(path[i:i+2]))

+ 

+         nodes = [{'id': n, 'label': self.ipmap[n]} for n in _nodes]

+         edges = [{'from': e[0], 'to': e[1]} for e in _edges]

+ 

+         return {

+             "orphaned": p in self.orphans_data["status_change"].keys(),

+             "depends_on_orphaned": bool(len(self.orphans_data["affected_packages"][p])),

+             "direct_dependencies": direct_deps,

+             "remote_dependencies": remote_deps,

+             "problematic_since": problematic_since,

+             "dot_graph": dot_graph.strip(),

+             "vis_js": {'nodes': nodes, 'edges': edges},

+         }

+ 

+ 

+ def get_orphans(packages, orphans_data=None):

+ 

+     if not orphans_data:

+         orphans_data = CACHE.get('orphans_json')

+ 

+     graph = OrphanGraph(orphans_data)

+     orphans = {}

+     for package in packages:

+         orphans[package] = graph.get_package_info(package)

+     return orphans

+ 

+ 

+ if __name__ == "__main__":

+     def test_orphans():

+         """

+           * notes packages being orphaned

+           A -- B -- C*

+                  -- D* -- E*

+                  -- E*

+             -- F*

+         """

+         DATA = {

+                 "affected_packages": {

+                     'A': ['B', 'F'],

+                     'B': ['C', 'D', 'E'],

+                     'C': [],

+                     'D': ['E'],

+                     'E': [],

+                     'F': []

+                 },

+                 "status_change": {

+                     'C': '2020-01-01T00:00:00',

+                     'D': '2020-01-01T00:00:01',

+                     'E': '2020-01-01T00:00:02',

+                     'F': '2020-01-01T00:00:03',

+ 

+                 }

+         }

+         out = get_orphans(["A", "F", "Z"], DATA)

+         # Not affected

+         o_z = {

+             "orphaned": False,

+             "depends_on_orphaned": False,

+             "direct_dependencies": [],

+             "remote_dependencies": [],

+             "problematic_since": None,

+             "dot_graph": "",

+             "vis_js": {"nodes": [], "edges": []}

+             }

+         assert out["Z"] == o_z

+ 

+         # Directly orphaned

+         o_f = {

+             "orphaned": True,

+             "depends_on_orphaned": False,

+             "direct_dependencies": [],

+             "remote_dependencies": [],

+             "problematic_since": '2020-01-01T00:00:03',

+             "dot_graph": "",

+             "vis_js": {"nodes": [], "edges": []}

+             }

+         assert out["F"] == o_f

+ 

+         # Complete example

+         o_a = {

+             "orphaned": False,

+             "depends_on_orphaned": True,

+             "direct_dependencies": ['F'],

+             "remote_dependencies": ['C', 'D', 'E'],

+             "problematic_since": '2020-01-01T00:00:00'

+             }

+         dg = sorted(out["A"]["dot_graph"].split('\n'))

+         vis_js = out["A"]["vis_js"]

+         del(out["A"]["dot_graph"])

+         del(out["A"]["vis_js"])

+ 

+         out["A"]["remote_dependencies"].sort()

+         assert out["A"] == o_a

+ 

+         o_dg = sorted([

+             '"A" -- "B" -- "C";',

+             '"A" -- "B" -- "D";',

+             '"A" -- "B" -- "E";',

+             '"A" -- "F";'

+             ])

+         assert o_dg == dg

+ 

+         assert len(vis_js.keys()) == 2

+         assert 'nodes' in vis_js.keys()

+         assert 'edges' in vis_js.keys()

+ 

+         nodes = sorted([(n['id'], n['label']) for n in vis_js['nodes']])

+         assert nodes == [(0, 'A'), (1, 'B'), (2, 'C'), (3, 'D'), (4, 'E'), (5, 'F')]

+ 

+         edges = sorted([(e['from'], e['to']) for e in vis_js['edges']])

+         assert edges == [(0, 1), (0, 5), (1, 2), (1, 3), (1, 4)]

+ 

+         print("OK")

+ 

+ 

+     test_orphans()
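
A minimal illustrative sketch (not part of the change) of the '"Z" * 20' sentinel used in get_package_info(): ISO 8601 timestamps compare lexicographically, so the sentinel sorts after any real timestamp and min() effectively ignores packages without a status_change entry:

```python
# Illustrative sketch only -- the '"Z" * 20' sentinel trick from get_package_info().
status_change = {"C": "2020-01-01T00:00:00", "F": "2020-01-01T00:00:03"}
candidates = ["C", "F", "A"]  # "A" is affected but not orphaned itself

oldest = min(status_change.get(p, "Z" * 20) for p in candidates)
print(oldest)  # 2020-01-01T00:00:00
```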

@@ -0,0 +1,192 @@ 

+ #

+ # pagure.py - Pagure parsers and utilities for packager dashboard

+ #

+ # Copyright 2020, Red Hat, Inc

+ #

+ # This program is free software; you can redistribute it and/or modify

+ # it under the terms of the GNU General Public License as published by

+ # the Free Software Foundation; either version 2 of the License, or

+ # (at your option) any later version.

+ #

+ # This program is distributed in the hope that it will be useful,

+ # but WITHOUT ANY WARRANTY; without even the implied warranty of

+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

+ # GNU General Public License for more details.

+ #

+ # You should have received a copy of the GNU General Public License along

+ # with this program; if not, write to the Free Software Foundation, Inc.,

+ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

+ #

+ # Authors:

+ #   Frantisek Zatloukal <fzatlouk@redhat.com>

+ #   Josef Skladanka <jskladan@redhat.com>

+ 

+ import datetime

+ import re

+ 

+ from oraculum.utils.dashboard_helpers import get_json, release_from_dist

+ 

+ from oraculum import app, CACHE

+ 

+ 

+ def get_pr_ci_result(package, pr_id):

+     """

+     Return CI result for package pull-request

+     Returns {"ci-name": "result", "different-ci-name": "result",...} ,

+     "result" can be whatever a CI returns, there is no damned standard for it

+     "ci-name" can be anything, Zuul and simple-koji-ci will be the most common at the time of writing the code

+     """

+     resp_pr_ci_result = get_json("https://src.fedoraproject.org/api/0/rpms/%s/pull-request/%s/flag" % (package, pr_id))

+     data = {}

+     if not resp_pr_ci_result:

+         return {}

+     if "flags" not in resp_pr_ci_result:

+         return {}

+     for result in resp_pr_ci_result["flags"]:

+         # We care only about the newest results

+         if result["username"] not in data.keys():

+             data[result["username"]] = result["status"]

+     return data

+ 

+ 

+ def get_package_prs(package):

+     """

+     Returns all open Pull Requests for a single package

+     """

+     data = []

+     resp_package_prs = get_json("https://src.fedoraproject.org/api/0/rpms/%s/pull-requests" % package)

+     if not resp_package_prs:

+         return None

+     # Filter out packages that, for some reason, do not exist in dist-git

+     if "error_code" in resp_package_prs and resp_package_prs["error_code"] == "ENOPROJECT":

+         return []

+     if not "requests" in resp_package_prs:

+         app.logger.error("Skipping PRs from package %s because Pagure returned invalid data" % package)

+         return []  # Return early if pagure sent invalid data

+     for request in resp_package_prs["requests"]:

+         if request["status"] == "Open":

+             data.append({

+                 "title": request["title"],

+                 "author": request["user"]['name'],

+                 "comments": len(request["comments"]),

+                 "date_created": str(datetime.datetime.fromtimestamp(int(request["date_created"]))),

+                 "last_updated": str(datetime.datetime.fromtimestamp(int(request["last_updated"]))),

+                 "ci_status": get_pr_ci_result(package, request["id"]),

+                 "release": "Fedora Rawhide" if request["branch"] == "master" else release_from_dist(request["branch"]),

+                 "url": "https://src.fedoraproject.org/rpms/%s/pull-request/%s" % (package, request["id"])

+             })

+     return data

+ 

+ def get_pagure_groups():

+     """

+     Returns a dictionary mapping each packager group in Pagure to its members and packages:

+     "group_name" : {

+         "users": [user_a, ...],

+         "packages": [package_a, ...]

+     }

+     """

+     groups_users = {}

+     resp = get_json("https://src.fedoraproject.org/api/0/groups?per_page=100")  # TODO: Handle pagination properly

+     if not resp:

+         return None

+     allowed_groups = [re.compile(a) for a in app.config["ALLOWED_PACKAGER_GROUPS"]]

+     for group in resp["groups"]:

+         if not any(regex.match(group) for regex in allowed_groups):

+             continue

+         app.logger.debug("Checking out Pagure group %s" % group)

+         group_resp = get_json("https://src.fedoraproject.org/api/0/group/%s?projects=1&acl=commit" % group)

+         if not group_resp:

+             continue

+         try:

+             groups_users[group] = {

+                 "users": group_resp["members"],

+                 "packages": [project["name"] for project in group_resp["projects"]]

+             }

+         except TypeError:

+             app.logger.error("Skipped Pagure group %s because of an error" % group)

+             continue

+     return groups_users

+ 

+ 

+ def get_user_group_packages(user, groups_map):

+     """

+     Returns list of packages user owns through a group

+     """

+     group_packages_dict = {}

+     group_packages_plain = set()

+     for group in groups_map:

+         if group == "packager":  # It seems every packager is in group "packager" and inherits zero packages through it

+             continue

+         if user in groups_map[group]["users"]:

+             group_packages_dict[group] = groups_map[group]["packages"]

+             for package in groups_map[group]["packages"]:

+                 group_packages_plain.add(package)

+     return group_packages_dict, group_packages_plain

+ 

+ 

+ def get_packages(user, pkg_owners_map, groups_map):

+     """

+     Returns all packages owned by user (including those owned through a group)

+     returns dict of:       "combined" - list of all packages, owned both directly or through group

+                            "group" - dict (keys are group names) of packages owned only through group

+                            "primary" - list of packages owned only directly

+     """

+     # pkg_owners_map['rpms']['some_package_name'] contains the list of 'some_package_name' maintainers

+     if user == "orphan":  # blacklist orphan user which has lots of unnecessary packages and would choke up our servers

+         return {

+             "primary": [],

+             "group": {},

+             "combined": []

+         }

+     group_packages, group_packages_list = get_user_group_packages(user, groups_map)

+     primary_packages = {package for package in pkg_owners_map['rpms'] if user in pkg_owners_map['rpms'][package]}

+     if user in groups_map:

+         primary_packages = set(groups_map[user]["packages"])  # "user" is actually a group; return its packages as primary packages

+     return {

+         "primary": list(primary_packages),

+         "group": group_packages,

+         "combined": list(primary_packages | group_packages_list)  # Join sets of primary and group packages

+     }

+ 

+ 

+ def get_package_versions(package):

+     versions = {}

+     releases = CACHE.get("fedora_releases")

+     for release in releases["values"][:-1]: # Skip Rawhide

+         versions[release_from_dist(str(release))] = {"stable": None, "testing": None}

+     # Append Rawhide in standardized naming format

+     versions["Fedora Rawhide"] = {"stable": None, "testing": None}

+     for release in app.config["EPEL_RELEASES"]:

+         versions[release_from_dist("el" + str(release))] = {"stable": None, "testing": None}

+ 

+     bodhi_versions = get_json("https://src.fedoraproject.org/_dg/bodhi_updates/rpms/%s" % package)

+     if not bodhi_versions:

+         # Return dictionary we've prepared above containing empty fields for each release

+         # versions = {"Fedora XX": {"stable": None, "testing": None}, "Fedora Rawhide": {"stable": None, "testing": None}...}

+         return versions

+     for release in bodhi_versions["updates"]:

+         # Exclude ELN for now

+         if "ELN" in release:

+             continue

+         formatted_release = release_from_dist(release)

+         try:

+             versions[formatted_release]["stable"] = bodhi_versions["updates"][release]["stable"]

+         except KeyError:

+             pass

+         try:

+             versions[formatted_release]["testing"] = bodhi_versions["updates"][release]["testing"]

+         except KeyError:

+             pass

+ 

+     # We need to query mdapi if we have None in Fedora stable values

+     for release in versions:

+         if "Fedora" in release and not versions[release]["stable"]:

+             if "Rawhide" in release:

+                 mdapi_response = get_json("https://mdapi.fedoraproject.org/%s/srcpkg/%s" % ("rawhide", package), False, False)

+             else:

+                 mdapi_response = get_json("https://mdapi.fedoraproject.org/%s/srcpkg/%s" % ("f" + release[-2:], package), False, False)

+             if not mdapi_response:

+                 continue

+             versions[release]["stable"] = package + "-" + mdapi_response["version"] + "-" + mdapi_response["release"]

+ 

+     return versions
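
A minimal illustrative sketch (not part of the change) of how get_packages() combines directly-owned and group-owned packages; all names and groups are made up:

```python
# Illustrative sketch only -- combining direct and group ownership as get_packages() does.
pkg_owners_map = {"rpms": {"foo": ["alice"], "bar": ["bob"]}}
groups_map = {"infra-sig": {"users": ["alice"], "packages": ["baz"]},
              "packager": {"users": ["alice", "bob"], "packages": []}}

user = "alice"
primary = {p for p, owners in pkg_owners_map["rpms"].items() if user in owners}
group = {g: m["packages"] for g, m in groups_map.items()
         if g != "packager" and user in m["users"]}
combined = primary | {p for pkgs in group.values() for p in pkgs}

print(sorted(primary))   # ['foo']
print(group)             # {'infra-sig': ['baz']}
print(sorted(combined))  # ['baz', 'foo']
```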

file modified
-1
@@ -5,7 +5,6 @@ 

  Flask == 1.1.1

  werkzeug == 0.16

  

- Flask-Caching

  Flask-Cors

  Flask-Login >= 0.3.0

  Flask-OIDC >= 1.1.1