@@ -8,16 +8,83 @@
import click
import smartcols
import solv
+ import requests
+ from requests_toolbelt.downloadutils.tee import tee_to_file
+ from fnmatch import fnmatch
+ from urllib.parse import urljoin
+ from bs4 import BeautifulSoup, SoupStrainer

XDG_CACHE_HOME = os.environ.get("XDG_CACHE_HOME") or os.path.expanduser("~/.cache")
- CACHEDIR = os.path.join(XDG_CACHE_HOME, "depchase")
+ CACHEDIR = os.path.join(XDG_CACHE_HOME, "fedmod")
+
+ log = logging.getLogger(__name__)
+
+ FALLBACK_STREAM = 'master'
+ STREAM = 'f27'
+ ARCH = 'x86_64'
+ REPO_URL_PREFIX = "https://dl.fedoraproject.org/pub/fedora/linux/development/27/Everything/"
+ REPO_METADATA_ARCH = os.path.join(REPO_URL_PREFIX, ARCH, "os/repodata/")
+ REPO_METADATA_SOURCE = os.path.join(REPO_URL_PREFIX, "source/tree/repodata/")
+ LOCAL_REPO_PATH = os.path.join(CACHEDIR, "repos", "f27")
+ LOCAL_REPO_INFO_ARCH = os.path.join(LOCAL_REPO_PATH, ARCH)
+ LOCAL_REPO_INFO_SOURCE = os.path.join(LOCAL_REPO_PATH, "source")
+
+ METADATA_FILES = ("*-filelists.xml.gz", "*-primary.xml.gz", "repomd.xml")
+
+ def _download_one_file(remote_url, filename):
+     if os.path.exists(filename):
+         print(f"Skipping download; {filename} already exists")
+         return
+     with requests.get(remote_url, stream=True) as response:
+         print(f"Downloading {remote_url}")
+         chunksize = 65536
+         expected_chunks = int(response.headers["content-length"]) / chunksize
+         downloader = tee_to_file(response, filename=filename, chunksize=chunksize)
+         show_progress = click.progressbar(downloader, length=expected_chunks)
+         with show_progress:
+             for chunk in show_progress:
+                 pass
+     print(f"Added {filename} to cache")
+
+ def _download_metadata_files(metadata_url, local_path):
+     os.makedirs(os.path.join(local_path, "repodata"), exist_ok=True)
+     response = requests.get(metadata_url)
+     response.raise_for_status()
+     link_filter = SoupStrainer("a", href=True)
+     metadata_links = BeautifulSoup(response.text, parse_only=link_filter, features="lxml")
+     patterns_to_check = set(METADATA_FILES)
+     files_to_fetch = set()
+     for link in metadata_links.find_all("a"):
+         href = link["href"]
+         for pattern in patterns_to_check:
+             if fnmatch(href, pattern):
+                 patterns_to_check.remove(pattern)
+                 files_to_fetch.add(href)
+                 break  # Go to next file
+     predownload = set(os.listdir(local_path))
+     for relative_href in files_to_fetch:
+         absolute_href = urljoin(metadata_url, relative_href)
+         filename = os.path.join(local_path, "repodata", relative_href)
+         # This could be parallelised with concurrent.futures, but
+         # probably not worth it (it makes the progress bars trickier)
+         _download_one_file(absolute_href, filename)
+     postdownload = set(os.listdir(local_path))
+     # Prune any old metadata files automatically
+     if len(postdownload) >= (len(predownload) + len(METADATA_FILES)):
+         # TODO: Actually prune old metadata files
+         pass
+
+
+ def download_repo_metadata():
+     """Downloads the latest repo metadata"""
+     _download_metadata_files(REPO_METADATA_ARCH, LOCAL_REPO_INFO_ARCH)
+     _download_metadata_files(REPO_METADATA_SOURCE, LOCAL_REPO_INFO_SOURCE)

- logger = logging.getLogger("depchase")

class Repo(object):
-     def __init__(self, name, baseurl):
+     def __init__(self, name, metadata_path):
        self.name = name
-         self.baseurl = baseurl
+         self.metadata_path = metadata_path
        self.handle = None
        self.cookie = None
        self.extcookie = None
@@ -39,7 +106,7 @@
        return chksum.raw()

    def cachepath(self, ext=None):
-         path = "{}-{}".format(self.name.replace(".", "_"), self.baseurl)
+         path = "{}-{}".format(self.name.replace(".", "_"), self.metadata_path)
        if ext:
            path = "{}-{}.solvx".format(path, ext)
        else:
@@ -140,7 +207,7 @@
        assert not self.handle
        self.handle = pool.add_repo(self.name)
        self.handle.appdata = self
-         f = self.download("repodata/repomd.xml", False, None)
+         f = self.read_repo_metadata("repodata/repomd.xml", False, None)
        if not f:
            self.handle.free(True)
            self.handle = None
@@ -152,7 +219,7 @@
        fname, fchksum = self.find("primary")
        if not fname:
            return False
-         f = self.download(fname, True, fchksum)
+         f = self.read_repo_metadata(fname, True, fchksum)
        if not f:
            return False
        self.handle.add_rpmmd(f, None)
@@ -162,8 +229,8 @@
        self.handle.create_stubs()
        return True

-     def download(self, fname, uncompress, chksum):
-         f = open("{}/{}".format(self.baseurl, fname))
+     def read_repo_metadata(self, fname, uncompress, chksum):
+         f = open("{}/{}".format(self.metadata_path, fname))
        return solv.xfopen_fd(fname if uncompress else None, f.fileno())

    def find(self, what):
@@ -212,7 +279,7 @@
            return True
        filename = repodata.lookup_str(solv.SOLVID_META, solv.REPOSITORY_REPOMD_LOCATION)
        filechksum = repodata.lookup_checksum(solv.SOLVID_META, solv.REPOSITORY_REPOMD_CHECKSUM)
-         f = self.download(filename, True, filechksum)
+         f = self.read_repo_metadata(filename, True, filechksum)
        if not f:
            return False
        if ext == "FL":
@@ -240,22 +307,12 @@
        return repo.load_ext(repodata)
    return False

- def setup_repos(conffile):
-     conf = configparser.ConfigParser(interpolation=configparser.ExtendedInterpolation())
+ def setup_repos():

-     with open(conffile, "r") as cfg:
-         conf.read_file(cfg)
-
-     repos = {}
-     for sect in conf.sections():
-         repos[sect] = Repo(sect, conf[sect]["path"])
-     for repo in repos.values():
-         if repo.name.endswith("-source"):
-             continue
-         repo.srcrepo = repos.get("{}-source".format(repo.name))
-         if repo.srcrepo is None:
-             raise RuntimeError("{}-source repo is not defined".format(repo.name))
-     return list(repos.values())
+     srcrepo = Repo("f27-source", LOCAL_REPO_INFO_SOURCE)
+     repo = Repo("f27", LOCAL_REPO_INFO_ARCH)
+     repo.srcrepo = srcrepo
+     return [repo, srcrepo]

def setup_pool(arch, repos=()):
    pool = solv.Pool()
@@ -264,7 +321,7 @@
    pool.set_loadcallback(load_stub)

    for repo in repos:
-         repo.baseurl = repo.baseurl.format(arch=arch)
+         repo.metadata_path = repo.metadata_path.format(arch=arch)

    for repo in repos:
        assert repo.load(pool)
@@ -317,7 +374,7 @@

def print_transaction(pool, transaction):
    candq = transaction.newpackages()
-     if logger.getEffectiveLevel() <= logging.INFO:
+     if log.getEffectiveLevel() <= logging.INFO:
        tb = smartcols.Table()
        tb.title = "DEPENDENCY INFORMATION"
        cl = tb.new_column("INFO")
@@ -345,7 +402,7 @@
                        lnc = lnss
                    first = False
                    lnc[cl_match] = str(m)
-         logger.info(tb)
+         log.info(tb)

def solve(solver, pkgnames, selfhost=False):
    pool = solver.pool
@@ -423,39 +480,37 @@
    return selfhosting, selfhosting_srcs


- def make_pool(arch, config):
-     return setup_pool(arch, setup_repos(config))
+ def make_pool(arch):
+     return setup_pool(arch, setup_repos())

- '''
- @click.option("--recommends/--no-recommends", default=False,
-               help="Do not process optional (aka weak) dependencies.")
- @click.option("--hint", multiple=True,
-               help="""
- Specify a package to have higher priority when more than one package could
- satisfy a dependency. This option may be specified multiple times.
+ _DEFAULT_HINTS = ("glibc-minimal-langpack",)

- For example, it is recommended to use --hint=glibc-minimal-langpack.
- """)
- @click.option("--selfhost", is_flag=True,
-               help="Look up the build dependencies as well.")
- '''
- def resolve(pool, pkgnames, recommends, hint, selfhost):
+ def resolve(pkgnames, hints=_DEFAULT_HINTS, recommendations=False, builddeps=False):
+     """Iterate over the resolved dependency set for the given packages

+     *hints*: Packages that have higher priority when more than one package
+     could satisfy a dependency.
+     *recommendations*: Whether or not to report recommended dependencies as well
+     as required dependencies (Default: required deps only)
+     *builddeps*: Whether or not to report build dependencies as well
+     as runtime dependencies (Default: runtime deps only)
+     """
+     pool = make_pool("x86_64")
    # Set up initial hints
    favorq = []
-     for n in hint:
+     for n in hints:
        sel = pool.select(n, solv.Selection.SELECTION_NAME)
        favorq += sel.jobs(solv.Job.SOLVER_FAVOR)
    pool.setpooljobs(favorq)

    solver = pool.Solver()
-     if not recommends:
+     if not recommendations:
        # Ignore weak deps
        solver.set_flag(solv.Solver.SOLVER_FLAG_IGNORE_RECOMMENDED, 1)

-     binary, source = solve(solver, pkgnames, selfhost=selfhost)
+     binary, source = solve(solver, pkgnames, selfhost=builddeps)
    for p in itertools.chain(binary, source or ()):
-         print(p)
+         yield p

def print_reldeps(pool, pkg):
    sel = pool.select(pkg, solv.Selection.SELECTION_NAME | solv.Selection.SELECTION_DOTARCH)
a resolve() functions as its main API
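
For reference, here is a minimal sketch of how the refactored module might be driven once this patch is applied. The `fedmod.depchase` import path and the example package name are assumptions for illustration only; the diff above doesn't name the module or ship a CLI for it.

```python
# Hypothetical import path -- adjust to wherever the refactored module lives
from fedmod import depchase

# One-off step: populate ~/.cache/fedmod/repos/f27/ with the f27 repo metadata
depchase.download_repo_metadata()

# Walk the runtime dependency closure of an example package
for pkg in depchase.resolve(["gimp"]):
    print(pkg)

# Include the build (source) dependencies as well
for pkg in depchase.resolve(["gimp"], builddeps=True):
    print(pkg)
```

Because resolve() now yields solvables instead of printing them, callers can post-process the closure (sort it, filter it, or diff it against a previous run) without touching the solver code.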