@@ -6,157 +6,71 @@
 # Authors: Will Woods <wwoods@redhat.com>
 # Seth Vidal <skvidal@fedoraproject.org>
 # Robert Marshall <rmarshall@redhat.com>
+# Adam Williamson <awilliam@redhat.com>
+
+# this is a script, not a public module, we don't need docstrings
+# pylint: disable=missing-module-docstring,missing-class-docstring,missing-function-docstring

-from __future__ import print_function
 import sys
 import argparse
 import shutil
 from tempfile import mkdtemp
 import dnf

+
 class SackError(Exception):
     pass

-major_version = sys.version_info[0]

 # Set some constants
 # Old definition
-#critpath_groups = ['@core','@critical-path-base','@critical-path-gnome']
-critpath_groups = [
-    '@core', '@critical-path-apps', '@critical-path-base',
-    '@critical-path-gnome', '@critical-path-kde', '@critical-path-lxde',
-    '@critical-path-xfce'
+# CRITPATH_GROUPS = ['@core','@critical-path-base','@critical-path-gnome']
+CRITPATH_GROUPS = [
+    "@core",
+    "@critical-path-apps",
+    "@critical-path-base",
+    "@critical-path-gnome",
+    "@critical-path-kde",
+    "@critical-path-lxde",
+    "@critical-path-xfce",
 ]
-primary_arches=('armhfp', 'aarch64', 'x86_64')
-alternate_arches=('ppc64le','s390x')
-# There is not current a programmatic way to generate this list
-fakearch = {'i386':'i686', 'x86_64':'x86_64', 'ppc64':'ppc64', 'ppc':'ppc64', 'armhfp':'armv7hl', 'aarch64':'aarch64', 'ppc64le':'ppc64le', 's390x':'s390x'}
-fedora_baseurl = 'http://dl.fedoraproject.org/pub/fedora/linux/'
-fedora_alternateurl = 'http://dl.fedoraproject.org/pub/fedora-secondary/'
-releasepath = {
-    'devel': 'development/rawhide/Everything/$basearch/os/',
-    'rawhide': 'development/rawhide/Everything/$basearch/os/'
-}
-updatepath = {
-    'devel': '',
-    'rawhide': ''
+PRIMARY_ARCHES = ("armhfp", "aarch64", "x86_64")
+ALTERNATE_ARCHES = ("ppc64le", "s390x")
+FEDORA_BASEURL = "http://dl.fedoraproject.org/pub/fedora/linux/"
+FEDORA_ALTERNATEURL = "http://dl.fedoraproject.org/pub/fedora-secondary/"
+RELEASEPATH = {
+    "devel": "development/rawhide/Everything/$basearch/os",
+    "rawhide": "development/rawhide/Everything/$basearch/os",
 }
+UPDATEPATH = {"devel": "", "rawhide": ""}

-for x in range(12,32,1):
-    r = str(x)
-    releasepath[r] = 'releases/%s/Everything/$basearch/os/' % r
-    updatepath[r] = 'updates/%s/$basearch/' % r
+# the numbers here are "oldest active stable release" and "Branched
+# number (or Rawhide number if no Branched)"; Python ranges exclude
+# the upper bound, so this covers all current stable releases
+for r in range(35, 37, 1):
+    RELEASEPATH[str(r)] = f"releases/{str(r)}/Everything/$basearch/os"
+    UPDATEPATH[str(r)] = f"updates/{str(r)}/Everything/$basearch"

-# Branched Fedora goes here
-branched = '32'
-releasepath['branched'] = 'development/%s/Everything/$basearch/os' % branched
-updatepath['branched'] = ''
+# Branched Fedora goes here, update the number when Branched number
+# changes
+RELEASEPATH["branched"] = "development/37/Everything/$basearch/os"
+UPDATEPATH["branched"] = ""

-# blacklists
-blacklist = [ 'tzdata' ]

 def get_source(pkg):
-    return pkg.rsplit('-',2)[0]
-
-provides_cache = {}
-def resolve_deps(pkg, base):
-    deps = []
-    for prov in pkg.provides:
-        provides_cache[prov] = pkg.name
-    for req in pkg.requires:
-        if req in provides_cache:
-            deps.append(provides_cache[req])
-            continue
-        try:
-            po = base.returnPackageByDep(req)
-        except yum.Errors.YumBaseError:
-            print("ERROR: unresolved dep for %s of pkg %s" % (req[0],
-                                                              pkg.name))
-            raise
-        provides_cache[req] = po.name
-        deps.append(po.name)
-        for prov in po.provides:
-            provides_cache[prov] = po.name
-
-    return deps
-
-def expand_yum_critpath(my, start_list):
-    name_list = []
-    # Expand the start_list to a list of names
-    for name in start_list:
-        if name.startswith('@'):
-            print("expanding %s" % name)
-            count = 0
-            group = my.comps.return_group(name[1:])
-            for groupmem in group.mandatory_packages.keys() + group.default_packages.keys():
-                if groupmem not in name_list:
-                    name_list.append(groupmem)
-                    count += 1
-            print("%s packages added" % count)
-        else:
-            if name not in name_list:
-                name_list.append(name)
-    # Iterate over the name_list
-    count = 0
-    pkg_list = []
-    skipped_list = []
-    handled = []
-
-    while name_list:
-        count += 1
-        name = name_list.pop(0)
-        handled.append(name)
-        if name in blacklist:
-            continue
-        print("depsolving %4u done/%4u remaining (%s)" % (count, len(name_list), name))
-        p = my.pkgSack.searchNevra(name=name)
-        if not p:
-            print("WARNING: unresolved package name: %s" % name)
-            skipped_list.append(name)
-            continue
-        for pkg in p:
-            pkg_list.append(pkg)
-            for dep in resolve_deps(pkg, my):
-                if dep not in handled and dep not in skipped_list and dep not in name_list:
-                    print(" added %s" % dep)
-                    name_list.append(dep)
-    print("depsolving complete.")
-    print("%u packages in critical path" % (count))
-    print("%u rejected package names: %s" % (len(skipped_list),
-                                             " ".join(skipped_list)))
-    return pkg_list
-
-
-def setup_yum(url=None, release=None, arch=None):
-    my = yum.YumBase()
-    basearch = getBaseArch()
-    cachedir = mkdtemp(dir='/tmp', prefix='critpath-')
-    if arch is None:
-        arch = basearch
-    elif arch != basearch:
-        my.preconf.arch = fakearch[arch]
-    my.conf.cachedir = cachedir
-    my.conf.installroot = cachedir
-    my.repos.disableRepo('*')
-    if "/mnt/koji/compose/" not in args.url:
-        my.add_enable_repo('critpath-repo-%s' % arch, baseurls=[url+releasepath[release]])
-        print("adding critpath-repo-%s at %s" % (arch, url+releasepath[release]))
-        if updatepath[release]:
-            my.add_enable_repo('critpath-repo-updates-%s' % arch, baseurls=[url+updatepath[release]])
-    else:
-        my.add_enable_repo('critpath-repo-%s' % arch, baseurls=[url+'/$basearch/os/'])
-        print("adding critpath-repo-%s at %s" % (arch, url+'/$basearch/os/'))
-    return (my, cachedir)
+    return pkg.rsplit("-", 2)[0]
+

-def nvr(p):
-    return '-'.join([p.name, p.ver, p.rel])
+def nvr(pkg):
+    return "-".join([pkg.name, pkg.ver, pkg.rel])

-def expand_dnf_critpath(release):
-    print("Resolving %s dependencies with DNF" % arch)
+
+def expand_dnf_critpath(urls, arch):
+    print(f"Resolving {arch} dependencies with DNF")
     base = dnf.Base()

-    temp_cache_dir = mkdtemp(suffix='-critpath')
-    temp_install_root = mkdtemp(suffix='-critpath-installroot')
+    temp_cache_dir = mkdtemp(suffix="-critpath")
+    temp_install_root = mkdtemp(suffix="-critpath-installroot")

     conf = base.conf
     # cache download data somewhere else
@@ -166,43 +80,43 @@
     conf.persistdir = temp_cache_dir
     conf.installroot = temp_install_root
     # dnf needs arches, not basearches to work
-    conf.arch = fakearch[arch]
+    if arch == "armhfp":
+        conf.arch = "armv7hl"
+    else:
+        conf.arch = arch
+    packages = set()

     try:
-        packages = set()
-
         # add a new repo requires an id, a conf object, and a baseurl
-        repo_url = url + releasepath[release]
-
         # make sure we don't load the system repo and get local data
-        print("Basearch: %s" % conf.basearch)
-        print("Arch: %s" % conf.arch)
-        print("%s repo %s" % (arch, repo_url))
+        print(f"Basearch: {conf.basearch}")
+        print(f"Arch: {conf.arch}")
+        for url in urls:
+            print(f"Repo: {url}")

         # mark all critpath groups in base object
-        for group in critpath_groups:
+        for group in CRITPATH_GROUPS:
             base.reset(repos=True, goal=True, sack=True)
-            base.repos.add_new_repo(arch, conf, baseurl=[repo_url])
+            repoids = []
+            for (num, url) in enumerate(urls):
+                repoid = arch + str(num)
+                repoids.append(repoid)
+                base.repos.add_new_repo(repoid, conf, baseurl=[url])
             base.fill_sack(load_system_repo=False)
-            if base.repos[arch].enabled is False:
-                raise SackError
+            for repoid in repoids:
+                if base.repos[repoid].enabled is False:
+                    raise SackError

             # load up the comps data from configured repositories
             base.read_comps()
-            group = group.replace('@','')
-            base.group_install(group, ['mandatory', 'default', 'optional'], strict=False)
+            group = group.replace("@", "")
+            base.group_install(group, ["mandatory", "default", "optional"], strict=False)
             # resolve the groups marked in base object
             base.resolve()
             packages = packages.union(base.transaction.install_set)

         return packages

-
-    except Exception as ex:
-        template = "An exception of type {0} occurred. Arguments:\n{1!r}"
-        message = template.format(type(ex).__name__, ex.args)
-        print(message)
-        print("DNF failed to synchronize the repository and cannot proceed.")
     finally:
         base.close()
         del base
@@ -210,138 +124,145 @@
         shutil.rmtree(temp_cache_dir)
         shutil.rmtree(temp_install_root)

-def solves_with_dnf(release_version):
-    if release_version == 'branched':
-        return True
-    elif release_version == 'rawhide':
-        return True
-    elif release_version == 'devel':
-        return True
-    elif (int(release_version) > 26):
-        return True
-    else:
-        return False
-
-
-if __name__ == '__main__':
-    # Option parsing
-    releases = sorted(releasepath.keys())
-    parser = argparse.ArgumentParser(usage = "%%(prog)s [options] [%s]" % '|'.join(releases))
-    parser.add_argument("--nvr", action='store_true', default=False,
-                        help="output full NVR instead of just package name")
-    parser.add_argument("-a", "--arches", default=','.join(primary_arches),
-                        help="Primary arches to evaluate (%(default)s)")
-    parser.add_argument("-s", "--altarches", default=','.join(alternate_arches),
-                        help="Alternate arches to evaluate (%(default)s)")
-    parser.add_argument("-o", "--output", default="critpath.txt",
-                        help="name of file to write critpath list (%(default)s)")
-    parser.add_argument("-u", "--url", default=fedora_baseurl,
-                        help="URL to Primary repos")
-    parser.add_argument("-r", "--alturl", default=fedora_alternateurl,
-                        help="URL to Alternate repos")
-    parser.add_argument("--srpm", action='store_true', default=False,
-                        help="Output source RPMS instead of binary RPMS (for pkgdb)")
-    parser.add_argument("--noaltarch", action='store_true', default=False,
-                        help="Not to run for alternate architectures")
-    parser.add_argument("--dnf", action='store_true', default=False,
-                        help="Use DNF for dependency solving")
-    args, extras = parser.parse_known_args()
-
-    # Input & Sanity Validation
-    if (len(extras) != 1) or (extras[0] not in releases):
-        parser.error("must choose a release from the list: %s" % releases)
-
-    # Parse values
-    release = extras[0]
-    check_arches = args.arches.split(',')
-    alternate_check_arches = args.altarches.split(',')
-    package_count = 0
-
-    using_dnf = False
-    if (args.dnf == True) or (major_version >= 3) or solves_with_dnf(release):
-        using_dnf = True
-
-    if not using_dnf:
-        import yum
-        from rpmUtils.arch import getBaseArch
-        if yum.__version_info__ < (3, 2, 24) and args.arches != getBaseArch():
-            print("WARNING: yum < 3.2.24 may be unable to depsolve other arches.")
-            print("Get a newer yum or run this on an actual %s system." % args.arches)
-            sys.exit(1)
-    else:
-        dnf_version = tuple(map(int, dnf.const.VERSION.split(".")))
-        if dnf_version < (2, 0, 0):
-            print("This script requires the DNF version 2.0 API.")
-            sys.exit(1)

+def parse_args():
+    releases = sorted(RELEASEPATH.keys())
+    parser = argparse.ArgumentParser()
+    mexcgroup = parser.add_mutually_exclusive_group()
+    parser.add_argument(
+        "release",
+        choices=releases,
+        help="The release to work on (a release number, 'branched', or 'rawhide')",
+    )
+    mexcgroup.add_argument(
+        "--nvr",
+        action="store_true",
+        default=False,
+        help="output full NVR instead of just package name",
+    )
+    mexcgroup.add_argument(
+        "--srpm",
+        action="store_true",
+        default=False,
+        help="Output source RPMS instead of binary RPMS (for uploading to PDC)",
+    )
+    parser.add_argument(
+        "-a",
+        "--arches",
+        default=",".join(PRIMARY_ARCHES),
+        help="Primary arches to evaluate (%(default)s)",
+    )
+    parser.add_argument(
+        "-s",
+        "--altarches",
+        default=",".join(ALTERNATE_ARCHES),
+        help="Alternate arches to evaluate (%(default)s)",
+    )
+    parser.add_argument(
+        "-o",
+        "--output",
+        default="critpath.txt",
+        help="name of file to write critpath list (%(default)s)",
+    )
+    parser.add_argument(
+        "-u",
+        "--url",
+        default=FEDORA_BASEURL,
+        help="URL to fedora/linux directory for primary arches",
+    )
+    parser.add_argument(
+        "-r",
+        "--alturl",
+        default=FEDORA_ALTERNATEURL,
+        help="URL to fedora-secondary directory for alternate arches",
+    )
+    parser.add_argument(
+        "-c",
+        "--composeurl",
+        required=False,
+        help="URL to a complete (not arch split) compose, overrides -u and -r",
+    )
+    parser.add_argument(
+        "--noaltarch",
+        action="store_true",
+        default=False,
+        help="Not to run for alternate architectures",
+    )
+    return parser.parse_args()
+
+
+def write_file(critpath, outpath):
+    with open(outpath, mode="w", encoding="utf-8") as outfh:
+        for packagename in sorted(critpath):
+            outfh.write(packagename + "\n")
+    package_count = len(critpath)
+    print(f"Wrote {package_count} items to {outpath}")
+
+
+def main():
+    args = parse_args()
+    release = args.release
+    check_arches = args.arches.split(",")
+    if "armhfp" in check_arches and not (release.isdigit() and int(release) < 37):
+        # armhfp is gone on F37+
+        check_arches.remove("armhfp")
+    alternate_check_arches = args.altarches.split(",")
+    package_count = 0

-    if args.nvr and args.srpm:
-        print("ERROR: --nvr and --srpm are mutually exclusive")
-        sys.exit(1)
-
-    if args.url != fedora_baseurl and "/mnt/koji/compose/" not in args.url:
-        releasepath[release] = releasepath[release].replace('development/','')
-        print("Using Base URL %s" % (args.url + releasepath[release]))
+    updateurl = None
+    updatealturl = None
+    if args.composeurl:
+        baseurl = args.composeurl + "/Everything/$basearch/os"
+        alturl = args.composeurl + "/Everything/$basearch/os"
     else:
-        print("Using Base URL %s" % (args.url))
+        baseurl = args.url + RELEASEPATH[release]
+        alturl = args.alturl + RELEASEPATH[release]
+        if UPDATEPATH[release]:
+            updateurl = args.url + UPDATEPATH[release]
+            updatealturl = args.alturl + UPDATEPATH[release]
+
+    print(f"Using Base URL {baseurl}")
+    print(f"Using alternate arch base URL {alturl}")
+    if updateurl:
+        print(f"Using update URL {updateurl}")
+    if updatealturl:
+        print(f"Using alternate arch update URL {updatealturl}")

     # Do the critpath expansion for each arch
     critpath = set()
-    for arch in check_arches+alternate_check_arches:
-        if arch in check_arches:
-            url=args.url
-        elif arch in alternate_check_arches:
+    for arch in check_arches + alternate_check_arches:
+        urls = [baseurl, updateurl]
+        if arch in alternate_check_arches:
             if args.noaltarch:
                 continue
-            else:
-                if "/mnt/koji/compose/" not in args.url:
-                    url = args.alturl
-                else:
-                    url = args.url
-        else:
-            raise Exception('Invalid architecture')
-        print("Expanding critical path for %s" % arch)
-        my = None
-        cachedir = None
-        pkgs = None
-
-        if using_dnf:
-            pkgs = expand_dnf_critpath(release)
-        else:
-            print("Resolving %s dependencies with YUM" % arch)
-            (my, cachedir) = setup_yum(url = url, release=release, arch=arch)
-            pkgs = expand_yum_critpath(my, critpath_groups)
+            urls = [alturl, updatealturl]
+        # strip None update URLs when we're not using them
+        urls = [url for url in urls if url]

-        if pkgs is None:
-            package_count = 0
-        else:
-            package_count = len(pkgs)
+        print(f"Expanding critical path for {arch}")
+        pkgs = expand_dnf_critpath(urls, arch)

-        print("%u packages for %s" % (package_count, arch))
+        package_count = len(pkgs)
+        print(f"{package_count} packages for {arch}")

         if args.nvr:
-            critpath.update([nvr(p).encode('utf8') for p in pkgs])
+            critpath.update([nvr(pkg) for pkg in pkgs])
         elif args.srpm:
-            critpath.update([get_source(p.sourcerpm) for p in pkgs])
+            critpath.update([get_source(pkg.sourcerpm) for pkg in pkgs])
         else:
-            critpath.update([p.name.encode('utf8') for p in pkgs])
+            critpath.update([pkg.name for pkg in pkgs])

         del pkgs
-
-        # deleting the cache dir has to happen after the above or
-        # massive errors occur
-        if not using_dnf:
-            del my
-            if cachedir.startswith("/tmp/"):
-                shutil.rmtree(cachedir)
         print()

-    # Write full list
-    f = open(args.output,"wb")
-    for packagename in sorted(critpath):
-        f.write(packagename + b'\n')
-    f.close()
-    if critpath == None:
-        package_count = 0
-    else:
-        package_count = len(critpath)
-    print("Wrote %u items to %s" % (package_count, args.output))
+
+    write_file(critpath, args.output)
+
+
+if __name__ == "__main__":
+    try:
+        main()
+    except KeyboardInterrupt:
+        sys.stderr.write("Interrupted, exiting...\n")
+        sys.exit(1)
+
+# vim: set textwidth=100 ts=8 et sw=4:
There's a lot here! Best to look at the commit messages.
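For a quick look at the new CLI: release is now a positional argument, and the parser itself enforces that --nvr and --srpm are mutually exclusive. A rough invocation sketch, based only on the parse_args() hunk above (the script name critpath.py and the compose URL are placeholders, since neither appears in the diff):

    python critpath.py rawhide
    python critpath.py --srpm -o critpath-srpms.txt 36
    python critpath.py --nvr --composeurl https://example.com/compose/latest branched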