#7 Switch to using depchase
Merged 6 years ago by ncoghlan. Opened 6 years ago by ncoghlan.
modularity/ ncoghlan/fedmod switch-to-depchase  into  master

file modified
+7 -1
@@ -3,8 +3,14 @@ 

  verify_ssl = true

  

  [dev-packages]

+ 

  pytest = "*"

  six = "*"

+ snakeviz = "*"

  

  [packages]

- "fedmod" = {editable = true, path = "."} 

\ No newline at end of file

+ 

+ fedmod = {editable = true, path = "."}

+ "beautifulsoup4" = "*"

+ "requests-toolbelt" = "*"

+ click = "*" 

\ No newline at end of file

file modified
+46 -6
@@ -1,7 +1,7 @@ 

  {

      "_meta": {

          "hash": {

-             "sha256": "a6176d82ed4cb9cea75035faf0b129a2b280fa41f9dfcf4fa8a4eb7c5cf8730c"

+             "sha256": "7dac39326ba07082a5742f5f9f35da3c32a2ea834ffa241142deae5556189695"

          },

          "host-environment-markers": {

              "implementation_name": "cpython",
@@ -9,9 +9,9 @@ 

              "os_name": "posix",

              "platform_machine": "x86_64",

              "platform_python_implementation": "CPython",

-             "platform_release": "4.12.13-300.fc26.x86_64",

+             "platform_release": "4.13.4-200.fc26.x86_64",

              "platform_system": "Linux",

-             "platform_version": "#1 SMP Thu Sep 14 16:00:38 UTC 2017",

+             "platform_version": "#1 SMP Thu Sep 28 20:46:39 UTC 2017",

              "python_full_version": "3.6.2",

              "python_version": "3.6",

              "sys_platform": "linux"
@@ -32,6 +32,14 @@ 

              ],

              "version": "==1.9.2"

          },

+         "beautifulsoup4": {

+             "hashes": [

+                 "sha256:7015e76bf32f1f574636c4288399a6de66ce08fb7b2457f628a8d70c0fbabb11",

+                 "sha256:11a9a27b7d3bddc6d86f59fb76afb70e921a25ac2d6cc55b40d072bd68435a76",

+                 "sha256:808b6ac932dccb0a4126558f7dfdcf41710dd44a4ef497a0bb59a77f9f078e89"

+             ],

+             "version": "==4.6.0"

+         },

          "certifi": {

              "hashes": [

                  "sha256:54a07c09c586b0e4c619f02a5e94e36619da8e2b053e20f594348c0611803704",
@@ -46,6 +54,13 @@ 

              ],

              "version": "==3.0.4"

          },

+         "click": {

+             "hashes": [

+                 "sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d",

+                 "sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b"

+             ],

+             "version": "==6.7"

+         },

          "fedmod": {

              "editable": true,

              "path": "."
@@ -73,6 +88,7 @@ 

              "hashes": [

                  "sha256:60f84e1b606d58cc457b186914c195072ebebf5d5a05e75c0136541808e06523"

              ],

+             "markers": "sys_platform != 'win32'",

              "version": "==1.1.14"

          },

          "python-dateutil": {
@@ -115,6 +131,13 @@ 

              ],

              "version": "==0.11.0"

          },

+         "requests-toolbelt": {

+             "hashes": [

+                 "sha256:42c9c170abc2cacb78b8ab23ac957945c7716249206f90874651971a4acff237",

+                 "sha256:f6a531936c6fa4c6cfce1b9c10d5c4f498d16528d2a54a22ca00011205a187b5"

+             ],

+             "version": "==0.8.0"

+         },

          "six": {

              "hashes": [

                  "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb",
@@ -140,10 +163,10 @@ 

          },

          "pytest": {

              "hashes": [

-                 "sha256:b84f554f8ddc23add65c411bf112b2d88e2489fd45f753b1cae5936358bdf314",

-                 "sha256:f46e49e0340a532764991c498244a60e3a37d7424a532b3ff1a6a7653f1a403a"

+                 "sha256:81a25f36a97da3313e1125fce9e7bbbba565bc7fec3c5beb14c262ddab238ac1",

+                 "sha256:27fa6617efc2869d3e969a3e75ec060375bfb28831ade8b5cdd68da3a741dc3c"

              ],

-             "version": "==3.2.2"

+             "version": "==3.2.3"

          },

          "six": {

              "hashes": [
@@ -151,6 +174,23 @@ 

                  "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"

              ],

              "version": "==1.11.0"

+         },

+         "snakeviz": {

+             "hashes": [

+                 "sha256:ef58335fe20e71384668159899985e6802bf668d1c82bb5ccd90592aba3b9ec2",

+                 "sha256:adc95ec5eb0a04bc2aa8325e8a713b9aa82ccb30425f9efe0e4d7479e2fa3bcb"

+             ],

+             "version": "==0.4.2"

+         },

+         "tornado": {

+             "hashes": [

+                 "sha256:62a5d4c66bf4e86d25a02e9de97293860b59e61f9c465e80336ba0fc308aacf6",

+                 "sha256:f109c066411c44bcd3bc877267b45feb8e29092ede59dd0582739444c2344b00",

+                 "sha256:e66f47db4753c6f6849af1f82f04bdc7d2c1f5d64b7cc11ddd17230295c8887f",

+                 "sha256:2b40720a7b164848ca5c51fab0feac7e3717adcb87bec77f81f7809b72bf7f56",

+                 "sha256:1fb8e494cd46c674d86fac5885a3ff87b0e283937a47d74eb3c02a48c9e89ad0"

+             ],

+             "version": "==4.5.2"

          }

      }

  }

file modified
+102 -47
@@ -8,16 +8,83 @@ 

  import click

  import smartcols

  import solv

+ import requests

+ from requests_toolbelt.downloadutils.tee import tee_to_file

+ from fnmatch import fnmatch

+ from urllib.parse import urljoin

+ from bs4 import BeautifulSoup, SoupStrainer

  

  XDG_CACHE_HOME = os.environ.get("XDG_CACHE_HOME") or os.path.expanduser("~/.cache")

- CACHEDIR = os.path.join(XDG_CACHE_HOME, "depchase")

+ CACHEDIR = os.path.join(XDG_CACHE_HOME, "fedmod")

+ 

+ log = logging.getLogger(__name__)

+ 

+ FALLBACK_STREAM = 'master'

+ STREAM = 'f27'

+ ARCH = 'x86_64'

+ REPO_URL_PREFIX = "https://dl.fedoraproject.org/pub/fedora/linux/development/27/Everything/"

+ REPO_METADATA_ARCH = os.path.join(REPO_URL_PREFIX, ARCH, "os/repodata/")

+ REPO_METADATA_SOURCE = os.path.join(REPO_URL_PREFIX, "source/tree/repodata/")

+ LOCAL_REPO_PATH = os.path.join(CACHEDIR, "repos", "f27")

+ LOCAL_REPO_INFO_ARCH = os.path.join(LOCAL_REPO_PATH, ARCH)

+ LOCAL_REPO_INFO_SOURCE = os.path.join(LOCAL_REPO_PATH, "source")

+ 

+ METADATA_FILES = ("*-filelists.xml.gz", "*-primary.xml.gz", "repomd.xml")

+ 

+ def _download_one_file(remote_url, filename):

+     if os.path.exists(filename):

+         print(f"Skipping download; {filename} already exists")

+         return

+     with requests.get(remote_url, stream=True) as response:

+         print(f"Downloading {remote_url}")

+         chunksize = 65536

+         expected_chunks = int(response.headers["content-length"]) / chunksize

+         downloader = tee_to_file(response, filename=filename, chunksize=chunksize)

+         show_progress = click.progressbar(downloader, length=expected_chunks)

+         with show_progress:

+             for chunk in show_progress:

+                 pass

+     print(f"Added {filename} to cache")

+ 

+ def _download_metadata_files(metadata_url, local_path):

+     os.makedirs(local_path, exist_ok=True)

+     response = requests.get(metadata_url)

+     response.raise_for_status()

+     link_filter = SoupStrainer("a", href=True)

+     metadata_links = BeautifulSoup(response.text, parse_only=link_filter, features="lxml")

+     patterns_to_check = set(METADATA_FILES)

+     files_to_fetch = set()

+     for link in metadata_links.find_all("a"):

+         href = link["href"]

+         for pattern in patterns_to_check:

+             if fnmatch(href, pattern):

+                 patterns_to_check.remove(pattern)

+                 files_to_fetch.add(href)

+                 break # Go to next file

+     predownload = set(os.listdir(local_path))

+     for relative_href in files_to_fetch:

+         absolute_href = urljoin(metadata_url, relative_href)

+         filename = os.path.join(local_path, "repodata", relative_href)

+         # This could be parallelised with concurrent.futures, but

+         # probably not worth it (it makes the progress bars trickier)

+         _download_one_file(absolute_href, filename)

+     postdownload = set(os.listdir(local_path))

+     # Prune any old metadata files automatically

+     if len(postdownload) >= (len(predownload) + len(METADATA_FILES)):

+         # TODO: Actually prune old metadata files

+         pass

+ 

+ 

+ def download_repo_metadata():

+     """Downloads the latest repo metadata"""

+     _download_metadata_files(REPO_METADATA_ARCH, LOCAL_REPO_INFO_ARCH)

+     _download_metadata_files(REPO_METADATA_SOURCE, LOCAL_REPO_INFO_SOURCE)

  

- logger = logging.getLogger("depchase")

  

  class Repo(object):

-     def __init__(self, name, baseurl):

+     def __init__(self, name, metadata_path):

          self.name = name

-         self.baseurl = baseurl

+         self.metadata_path = metadata_path

          self.handle = None

          self.cookie = None

          self.extcookie = None
@@ -39,7 +106,7 @@ 

          return chksum.raw()

  

      def cachepath(self, ext=None):

-         path = "{}-{}".format(self.name.replace(".", "_"), self.baseurl)

+         path = "{}-{}".format(self.name.replace(".", "_"), self.metadata_path)

          if ext:

              path = "{}-{}.solvx".format(path, ext)

          else:
@@ -140,7 +207,7 @@ 

          assert not self.handle

          self.handle = pool.add_repo(self.name)

          self.handle.appdata = self

-         f = self.download("repodata/repomd.xml", False, None)

+         f = self.read_repo_metadata("repodata/repomd.xml", False, None)

          if not f:

              self.handle.free(True)

              self.handle = None
@@ -152,7 +219,7 @@ 

          fname, fchksum = self.find("primary")

          if not fname:

              return False

-         f = self.download(fname, True, fchksum)

+         f = self.read_repo_metadata(fname, True, fchksum)

          if not f:

              return False

          self.handle.add_rpmmd(f, None)
@@ -162,8 +229,8 @@ 

          self.handle.create_stubs()

          return True

  

-     def download(self, fname, uncompress, chksum):

-         f = open("{}/{}".format(self.baseurl, fname))

+     def read_repo_metadata(self, fname, uncompress, chksum):

+         f = open("{}/{}".format(self.metadata_path, fname))

          return solv.xfopen_fd(fname if uncompress else None, f.fileno())

  

      def find(self, what):
@@ -212,7 +279,7 @@ 

              return True

          filename = repodata.lookup_str(solv.SOLVID_META, solv.REPOSITORY_REPOMD_LOCATION)

          filechksum = repodata.lookup_checksum(solv.SOLVID_META, solv.REPOSITORY_REPOMD_CHECKSUM)

-         f = self.download(filename, True, filechksum)

+         f = self.read_repo_metadata(filename, True, filechksum)

          if not f:

              return False

          if ext == "FL":
@@ -240,22 +307,12 @@ 

          return repo.load_ext(repodata)

      return False

  

- def setup_repos(conffile):

-     conf = configparser.ConfigParser(interpolation=configparser.ExtendedInterpolation())

+ def setup_repos():

  

-     with open(conffile, "r") as cfg:

-         conf.read_file(cfg)

- 

-     repos = {}

-     for sect in conf.sections():

-         repos[sect] = Repo(sect, conf[sect]["path"])

-     for repo in repos.values():

-         if repo.name.endswith("-source"):

-             continue

-         repo.srcrepo = repos.get("{}-source".format(repo.name))

-         if repo.srcrepo is None:

-             raise RuntimeError("{}-source repo is not defined".format(repo.name))

-     return list(repos.values())

+     srcrepo = Repo("f27-source", LOCAL_REPO_INFO_SOURCE)

+     repo = Repo("f27", LOCAL_REPO_INFO_ARCH)

+     repo.srcrepo = srcrepo

+     return [repo, srcrepo]

  

  def setup_pool(arch, repos=()):

      pool = solv.Pool()
@@ -264,7 +321,7 @@ 

      pool.set_loadcallback(load_stub)

  

      for repo in repos:

-         repo.baseurl = repo.baseurl.format(arch=arch)

+         repo.metadata_path = repo.metadata_path.format(arch=arch)

  

      for repo in repos:

          assert repo.load(pool)
@@ -317,7 +374,7 @@ 

  

  def print_transaction(pool, transaction):

      candq = transaction.newpackages()

-     if logger.getEffectiveLevel() <= logging.INFO:

+     if log.getEffectiveLevel() <= logging.INFO:

          tb = smartcols.Table()

          tb.title = "DEPENDENCY INFORMATION"

          cl = tb.new_column("INFO")
@@ -345,7 +402,7 @@ 

                          lnc = lnss

                          first = False

                      lnc[cl_match] = str(m)

-         logger.info(tb)

+         log.info(tb)

  

  def solve(solver, pkgnames, selfhost=False):

      pool = solver.pool
@@ -423,39 +480,37 @@ 

      return selfhosting, selfhosting_srcs

  

  

- def make_pool(arch, config):

-     return setup_pool(arch, setup_repos(config))

+ def make_pool(arch):

+     return setup_pool(arch, setup_repos())

  

- '''

- @click.option("--recommends/--no-recommends", default=False,

-               help="Do not process optional (aka weak) dependencies.")

- @click.option("--hint", multiple=True,

-               help="""

- Specify a package to have higher priority when more than one package could

- satisfy a dependency. This option may be specified multiple times.

+ _DEFAULT_HINTS = ("glibc-minimal-langpack",)

  

- For example, it is recommended to use --hint=glibc-minimal-langpack.

- """)

- @click.option("--selfhost", is_flag=True,

-               help="Look up the build dependencies as well.")

- '''

- def resolve(pool, pkgnames, recommends, hint, selfhost):

+ def resolve(pkgnames, hints=_DEFAULT_HINTS, recommendations=False, builddeps=False):

+     """Iterate over the resolved dependency set for the given packages

  

+     *hints*:  Packages that have higher priority when more than one package

+               could satisfy a dependency.

+     *recommendations*: Whether or not to report recommended dependencies as well

+                  as required dependencies (Default: required deps only)

+     *builddeps*: Whether or not to report build dependencies as well

+                  as runtime dependencies (Default: runtime deps only)

+     """

+     pool = make_pool("x86_64")

      # Set up initial hints

      favorq = []

-     for n in hint:

+     for n in hints:

          sel = pool.select(n, solv.Selection.SELECTION_NAME)

          favorq += sel.jobs(solv.Job.SOLVER_FAVOR)

      pool.setpooljobs(favorq)

  

      solver = pool.Solver()

-     if not recommends:

+     if not recommendations:

          # Ignore weak deps

          solver.set_flag(solv.Solver.SOLVER_FLAG_IGNORE_RECOMMENDED, 1)

  

-     binary, source = solve(solver, pkgnames, selfhost=selfhost)

+     binary, source = solve(solver, pkgnames, selfhost=builddeps)

      for p in itertools.chain(binary, source or ()):

-         print(p)

+         yield p

  

  def print_reldeps(pool, pkg):

      sel = pool.select(pkg, solv.Selection.SELECTION_NAME | solv.Selection.SELECTION_DOTARCH)

file modified
+11
@@ -3,6 +3,9 @@ 

  import logging

  

  from .module_generator import ModuleGenerator

+ from . import _depchase

+ 

+ # TODO: Switch this over to click (already a dependency for progress bars)

  

  class ModtoolsCLI(object):

      """ Class for processing data from commandline """
@@ -32,6 +35,12 @@ 

              help="Specify list of packages for module.",

          )

  

+         parser_metadata = subparsers.add_parser(

+             'metadata', parents=[base_parser],

+             help="Fetches repository metadata",

+             description="Caches needed repository metadata locally"

+         )

+ 

          return parser

  

      def __init__(self, args=None):
@@ -56,6 +65,8 @@ 

              if cli.args.cmd_name == 'rpm2module':

                  mg = ModuleGenerator(cli.args.pkgs)

                  mg.run()

+             elif cli.args.cmd_name == 'metadata':

+                 _depchase.download_repo_metadata()

  

          except KeyboardInterrupt:

              print('\nInterrupted by user')

@@ -1,203 +0,0 @@ 

- import os

- import logging

- import requests

- from six.moves import configparser

- 

- from functools import lru_cache, partial

- 

- import dnf

- 

- FALLBACK_STREAM = 'master'

- STREAM = 'f27'

- REPO_F27 = "https://dl.fedoraproject.org/pub/fedora/linux/development/27/Everything/x86_64/os/"

- REPO_F27_SOURCE = "https://dl.fedoraproject.org/pub/fedora/linux/development/27/Everything/source/tree/"

- ARCH = 'x86_64'

- 

- 

- # TODO: add ability to solve more packages in one run

- # TODO: solve architecture! - as option?

- 

- 

- p = None

- 

- def download_repo_metadata(config_path, metadata_dir):

-     """TODO: Automate retrieving the required repo metadata"""

-     raise NotImplementedError(

-         "See https://github.com/fedora-modularity/depchase#installation "

-         "for download instructions"

-     )

- 

- 

- def get_pkgs_source_rpm_name(pkg):

-     """

-     pkg: dnf.Package

- 

-     returns name of source RPM used to build the provided rpm

-     """

-     return pkg.source_name

- 

- @lru_cache()

- def get_package_from_base(base, *pkg, **kwds):

-     if pkg:

-         kwds["name"] = pkg[0]

-     result = base.sack.query().filter(latest=True, **kwds)

-     if not result:

-         return None

-     return result[0]

- 

- class ModuleDepsDiffer(object):

- 

-     def __init__(self, pkgs):

-         self.repos = dict()

-         self.repo_provides = dict()

-         # TODO: get this from args

-         self.packages = list(pkgs)

-         self.build_req = set()

-         self.runtime_req = set()

-         self.build_caps_classified = dict()

-         self.build_caps_classified['result'] = set()

-         self.runtime_caps_classified = dict()

-         self.runtime_caps_classified['result'] = set()

-         self.repo_bases = {}

-         self.module_id_to_name = {}

- 

-     def get_module_name(self, koji_tag_name):

-         return self.module_id_to_name.get(koji_tag_name, koji_tag_name)

- 

-     def obtain_module_names(self):

-         # TODO: download the repo metadata containing module info

-         pass

- 

-     @staticmethod

-     @lru_cache()

-     def get_base_from_repo(reponame, repourl):

-         logging.info('Loading repo: %s', reponame)

-         base = dnf.Base()

-         repo = dnf.repo.Repo(reponame, base.conf)

-         repo.baseurl = repourl

-         repo.load()

-         repo.enable()

-         base.repos.add(repo)

-         base.fill_sack(load_available_repos=True, load_system_repo=False)

-         return base

- 

-     def _init_repo_bases(self):

-         # TODO: download the repo metadata containing module info

-         pass

- 

-     def get_package_requires(self):

-         # runtime

-         logging.info('Getting package requirements')

-         pkgs = []

-         base = ModuleDepsDiffer.get_base_from_repo('f27', REPO_F27)

-         for pkg in self.packages:

-             filter_result = get_package_from_base(base, pkg, arch=('noarch', ARCH))

-             if filter_result is None:

-                 raise ValueError('RPM not found in repo {!}'.format(pkg))

-             pkgs.append(filter_result)

-         for pkg in pkgs:

-             requires = getattr(pkg, 'requires')

-             for q in requires:

-                 self.runtime_req.add(str(q))

- 

-         # build

-         source_pkgs = []

-         base = ModuleDepsDiffer.get_base_from_repo('f27-source', REPO_F27_SOURCE)

-         for pkg in pkgs:

-             source_rpm_name = get_pkgs_source_rpm_name(pkg)

-             filter_result = get_package_from_base(base, source_rpm_name)

-             if filter_result is None:

-                 raise ValueError('SRPM not found in repo {!}'.format(source_rpm_name))

-             source_pkgs.append(filter_result)

-         for pkg in source_pkgs:

-             requires = getattr(pkg, 'requires')

-             for q in requires:

-                 self.build_req.add(str(q))

- 

-     # Igor suggests to do complete dependency solving here:

-     #   base.install('package'), installroot=, make sure the right repos are enabled

-     #   base.resolve()

-     #   and then analyze base.transaction.install_set

-     # weak deps will get resolved correctly with this approach

-     def classify_caps(self, caps_classified, requirements):

-         for cap in requirements:

-             found = False

-             for reponame, base in self.repo_bases.items():

-                 q = base.sack.query()

-                 pkg = q.filter(provides=[cap], arch=['noarch', ARCH], latest=True)

-                 if pkg:

-                     found = True

-                     caps_classified.setdefault(reponame, set())

-                     caps_classified[reponame].add(cap)

-                     # don't break here in case one cap is present in multiple repos

-             if not found:

-                 caps_classified['result'].add(cap)

- 

-     @staticmethod

-     def whatprovides(caps):

-         result = set()

-         base = ModuleDepsDiffer.get_base_from_repo('f27', REPO_F27)

-         query = partial(get_package_from_base, base)

-         for cap in caps:

-             logging.info('Getting source package for %s:', cap)

-             pkg = query(provides__glob=(cap,), arch=('noarch', ARCH))

-             if pkg is None:

-                 pkg = query(file__glob=cap)

-                 if pkg is None:

-                     raise ValueError('No package provides capability \'' + cap + '\'')

-             source_rpm_name = get_pkgs_source_rpm_name(pkg)

-             result.add(source_rpm_name)

-         return result

- 

-     def _dump_caps_result(self, items):

-         for repo, caps in items:

-             if not caps:

-                 continue

-             if repo == 'result':

-                 print("Capabilities which aren't provided by any module:")

-             else:

-                 print("Components which are part of " +

-                       self.get_module_name(repo) +

-                       " module:")

-             for cap in sorted(caps):

-                 print(cap)

-             print('')

- 

-     def dump_caps_result(self):

-         print('BUILD requirements:')

-         print('-------------------')

-         self._dump_caps_result(self.build_caps_classified.items())

-         print('\n')

-         print('RUNTIME requirements:')

-         print('---------------------')

-         self._dump_caps_result(self.runtime_caps_classified.items())

- 

-     def _dump_pkgs_result(self, items):

-         for repo, caps in items:

-             if not caps:

-                 continue

-             if repo == 'result':

-                 print("Components which aren't provided by any module:")

-             else:

-                 print("Components which are part of " +

-                       self.get_module_name(repo) +

-                       " module:")

-             for cap in sorted(ModuleDepsDiffer.whatprovides(caps)):

-                 print(cap)

-             print('')

- 

-     def dump_pkgs_result(self):

-         print('BUILD dependencies:')

-         print('-------------------')

-         self._dump_pkgs_result(self.build_caps_classified.items())

-         print('\n')

-         print('RUNTIME dependencies:')

-         print('---------------------')

-         self._dump_pkgs_result(self.runtime_caps_classified.items())

- 

-     def run(self):

-         self.obtain_module_names()

-         self._init_repo_bases()

-         self.get_package_requires()

-         self.classify_caps(self.build_caps_classified, self.build_req)

-         self.classify_caps(self.runtime_caps_classified, self.runtime_req)

file modified
+25 -49
@@ -1,9 +1,9 @@ 

  from __future__ import absolute_import

  

  import modulemd

- import dnf

- from .module_deps_differ import ModuleDepsDiffer

  import logging

+ import dnf

+ from . import _depchase

  

  class ModuleGenerator(object):

  
@@ -11,9 +11,25 @@ 

          self.pkgs = pkgs

          self.pkg = None

          self.mmd = modulemd.ModuleMetadata()

-         self.build_deps = set()

-         self.run_deps = set()

-         self.differ = ModuleDepsDiffer(pkgs)

+         # Dependency issue with the test build deps, so skip that for now...

+         all_deps = {s.name for s in _depchase.resolve(pkgs, builddeps=False)}

+         runtime_deps = {s.name for s in _depchase.resolve(pkgs)}

+         self.run_deps = runtime_deps

+         self.build_deps = all_deps - runtime_deps

+ 

+     def _get_pkg_info(self):

+         """Function loads package from dnf"""

+         # TODO: Get this from the _depchase metadata, not the system metadata

+         logging.info("Getting package info from DNF")

+         b = dnf.Base()

+         b.read_all_repos()

+         b.fill_sack()

+         q = b.sack.query().filter(name=self.pkgs, reponame='fedora', latest=True)

+         if len(q) > 1:

+             raise ValueError('Name of package is not unique')

+         if len(q) == 0:

+             raise ValueError('No package found in repo')

+         self.pkg = q[0]

  

      def _save_module_md(self):

          """
@@ -49,7 +65,8 @@ 

  

          for pkg in self.pkgs:

              self.mmd.api.add_rpm(pkg)

-             self.mmd.components.add_rpm(ModuleDepsDiffer.whatprovides([pkg]).pop(), "Package in api", buildorder=self._get_build_order(pkg))

+             # TODO: Restore resolution of pkg to the actual provider

+             self.mmd.components.add_rpm(pkg, "Package in api", buildorder=self._get_build_order(pkg))

  

          for pkg in (self.build_deps - self.mmd.api.rpms - self.run_deps):

              self.mmd.filter.add_rpm(pkg)
@@ -63,19 +80,8 @@ 

          for pkg in (self.run_deps - self.build_deps):

              self.mmd.components.add_rpm(pkg, "Runtime dependency.", buildorder=self._get_build_order(pkg))

  

-         for mod, caps in self.differ.build_caps_classified.items():

-             if mod == 'result':

-                 continue

-             if caps:

-                 name, stream = self.differ.get_module_name(mod).split(':')

-                 self.mmd.add_buildrequires(name, stream)

- 

-         for mod, caps in self.differ.runtime_caps_classified.items():

-             if mod == 'result':

-                 continue

-             if caps:

-                 name, stream = self.differ.get_module_name(mod).split(':')

-                 self.mmd.add_requires(name, stream)

+         # TODO: Restore module level dependency declarations for modules that

+         #       depend on more than just the base platform

  

      def _get_build_order(self, pkg):

          if pkg in self.mmd.api.rpms:
@@ -83,38 +89,8 @@ 

          else:

              return 0

  

-     def _get_pkg_info(self):

-         """

-             Function loads package from dnf

-             :return:

-             """

-         logging.info("Getting package info from DNF")

-         b = dnf.Base()

-         b.read_all_repos()

-         b.fill_sack()

- 

-         q = b.sack.query().filter(name=self.pkgs, reponame='fedora', latest=True)

- 

-         if len(q) > 1:

-             raise ValueError('Name of package is not unique')

-         if len(q) == 0:

-             raise ValueError('No package found in repo')

-         self.pkg = q[0]

- 

-     def _get_dependencies(self):

-         """

-             Function gets build and runtime dependencies of package

-             :return:

-             """

-         logging.info('Dependency resolution started')

-         self.differ.run()

-         self.build_deps = ModuleDepsDiffer.whatprovides(self.differ.build_caps_classified['result'])

-         self.run_deps = ModuleDepsDiffer.whatprovides(self.differ.runtime_caps_classified['result'])

-         logging.info('Dependency resolution finished succesfully.')

- 

      def run(self):

          if len(self.pkgs) == 1:

              self._get_pkg_info()

-         self._get_dependencies()

          self._update_module_md()

          self._save_module_md()

file modified
+5 -2
@@ -17,7 +17,7 @@ 

      ),

      license='MIT',

      keywords='modularization modularity module modulemd fedora',

-     url='https://pagure.io/modularity/modularity-tools',

+     url='https://pagure.io/modularity/fedmod',

      entry_point={

          'console_scripts': [

              'fedmod=modularity.cli.ModtoolsCliHelper.run'
@@ -25,7 +25,10 @@ 

      },

      install_requires=[

          'modulemd',

-         'pdc-client',

+         'click',

+         'beautifulsoup4',

+         'requests',

+         'requests-toolbelt',

      ],

      packages=find_packages(),

  )

  • module deps differ is gone
  • refactored fedmod._depchase to expose
    a resolve() function as its main API
  • added a metadata download command

@ignatenkobrain I'm having some issues with depchase, where I don't think I'm initialising libsolv correctly. Attempting to call make_pool("x86_64") gives the following tracebacks:

>>> fedmod._depchase.make_pool("x86_64")
Traceback (most recent call last):
  File "/home/ncoghlan/fedoradevel/fedmod/fedmod/_depchase.py", line 305, in load_stub
    repo = repodata.repo.appdata
TypeError: in method 'XRepodata_repo_get', argument 1 of type 'XRepodata *'

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "/home/ncoghlan/fedoradevel/fedmod/fedmod/_depchase.py", line 484, in make_pool
    return setup_pool(arch, setup_repos())
  File "/home/ncoghlan/fedoradevel/fedmod/fedmod/_depchase.py", line 331, in setup_pool
    addedprovides = pool.addfileprovides_queue()
  File "/usr/lib64/python3.6/site-packages/solv.py", line 1173, in addfileprovides_queue
    return _solv.Pool_addfileprovides_queue(self)
SystemError: <built-in function Pool_addfileprovides_queue> returned a result with an error set

it's hard to say at first glance, but I guess your repo.handle.appdata is not pointing to repo...

please don't do this. libsolv parses repomd.xml and can tell you proper URL.

this should be Repo object, not string

This is currently just automating the instructions from https://github.com/fedora-modularity/depchase#installation

Switching to parsing repomd.xml would make sense, but I figure it isn't going to be the cause of the current problem.

The bug turned out to be really mundane, and I fixed it by resolving the unresolved pylint errors: I'd changed the name of one of the metadata loading methods, and missed updating one of the calls to that method.

I also fixed the hardcoded repo metadata loading to set repo.srcrepo correctly for the x86_64 repo object.

The tests are still failing, but they're failing complaining about a missing dependency for a package that was last successfully built in Fedora 24 (rubygems-shoulda-matchers): https://koji.fedoraproject.org/koji/packageinfo?packageID=18740

The missing dependency is rubygems-protected_attributes: https://koji.fedoraproject.org/koji/packageinfo?packageID=16831

1 new commit added

  • Fix some problems
6 years ago

1 new commit added

  • More fixes & workarounds:
6 years ago

Pull-Request has been merged by ncoghlan

6 years ago

Pull-Request has been merged by ncoghlan

6 years ago

I went ahead and merged this, with the workaround of not actually including the build dependencies yet.

My rationale for that is that it gets the test suite to a much happier place, since a run completes in 20 seconds rather than a few minutes.