@@ -3,8 +3,10 @@
 import collections
 import copy
 import json
+import os.path
 import re
 import subprocess
+from urllib.parse import urlparse
 
 
 Requirement = collections.namedtuple('Requirement', ('kind',
@@ -162,6 +164,35 @@
                 raise ValueError(f'Found unhandled kind: {requirement}')
         return normalized
 
+    @staticmethod
+    def eval_(v1, op, v2):
+        if op == CargoSemVer.KIND_SHORTEQ:
+            return all((v1.major == v2.major,
+                        v1.minor == v2.minor,
+                        v1.patch == v2.patch))
+        elif op == CargoSemVer.KIND_GT:
+            return ((v1.major > v2.major) or
+                    (v1.major == v2.major and v1.minor > v2.minor) or
+                    (v1.major == v2.major and v1.minor == v2.minor and
+                     v1.patch > v2.patch))
+        elif op == CargoSemVer.KIND_GTE:
+            return ((v1.major > v2.major) or
+                    (v1.major == v2.major and v1.minor > v2.minor) or
+                    (v1.major == v2.major and v1.minor == v2.minor and
+                     v1.patch >= v2.patch))
+        elif op == CargoSemVer.KIND_LT:
+            return ((v1.major < v2.major) or
+                    (v1.major == v2.major and v1.minor < v2.minor) or
+                    (v1.major == v2.major and v1.minor == v2.minor and
+                     v1.patch < v2.patch))
+        elif op == CargoSemVer.KIND_LTE:
+            return ((v1.major < v2.major) or
+                    (v1.major == v2.major and v1.minor < v2.minor) or
+                    (v1.major == v2.major and v1.minor == v2.minor and
+                     v1.patch <= v2.patch))
+        else:
+            raise ValueError(f'Cannot evaluate operator: {op}')
+
 
 class Target:
     def __init__(self, name, kind):
@@ -173,11 +204,13 @@
 
 
 class Dependency:
-    def __init__(self, name, req=None, features=(), optional=False):
+    def __init__(self, name, req=None, features=(), optional=False,
+                 bundled=False):
         self.name = name
         self.req = req
         self.features = features
         self.optional = optional
+        self.bundled = bundled
 
     @classmethod
     def from_json(cls, metadata):
@@ -191,9 +224,11 @@
         return cls(**kwargs)
 
     @staticmethod
-    def _apply_reqs(name, reqs, feature=None):
+    def _apply_reqs(name, reqs, feature=None, bundled=False):
         fstr = f"/{feature}" if feature is not None else ""
         cap = f"crate({name}{fstr})"
+        if bundled:
+            cap = f"bundled({cap})"
         if not reqs:
             return cap
         deps = ' with '.join(
@@ -206,11 +241,13 @@
 
     def normalize(self):
         semver = CargoSemVer(self.req)
-        return [self._apply_reqs(self.name, semver.normalized, feature)
+        return [self._apply_reqs(self.name, semver.normalized, feature,
+                                 self.bundled)
                 for feature in self.features or (None,)]
 
     def __repr__(self):
-        return f"<Dependency: {self.name} {self.req} ({', '.join(sorted(self.features))})>"
+        features = sorted(feature for feature in self.features if feature)
+        return f"<Dependency: {self.name} {self.req} ({', '.join(features)})>"
 
     def __str__(self):
         return "\n".join(self.normalize())
@@ -230,6 +267,7 @@
         self.targets = set()
         self.dependencies = {}
         self.dev_dependencies = set()
+        self._path = None
 
     @property
     def description(self):
@@ -272,9 +310,10 @@
             self._summary = description[:p]
 
     @classmethod
-    def from_json(cls, metadata):
+    def from_json(cls, metadata, path):
         md = metadata
         self = cls(md["name"], md["version"])
+        self._path = path
 
         self.license = md["license"]
         self.license_file = md["license_file"]
@@ -333,10 +372,44 @@
         return self
 
     @classmethod
-    def from_file(cls, path):
-        metadata = subprocess.check_output(["cargo", "read-manifest",
-                                            f"--manifest-path={path}"])
-        return cls.from_json(json.loads(metadata))
+    def from_file(cls, path, include_members=False):
+        instances = []
+        members = Metadata.members(path) if include_members else []
+        for member in (members or [path]):
+            instance = cls.from_json(Metadata.manifest(member), member)
+            instances.append(instance)
+        return instances
+
+    @staticmethod
+    def manifest(path, check=True):
+        output = subprocess.run(
+            ["cargo", "read-manifest", f"--manifest-path={path}"],
+            check=check, capture_output=True
+        )
+        try:
+            result = json.loads(output.stdout)
+        except json.decoder.JSONDecodeError:
+            # A pure virtual manifest cannot be read; we need to use
+            # the manifests from the different workspace members
+            result = {}
+        return result
+
+    @staticmethod
+    def metadata(path, deps=False):
+        cmd = ["cargo", "metadata", "--format-version=1",
+               f"--manifest-path={path}"]
+        if not deps:
+            cmd.append("--no-deps")
+        return json.loads(subprocess.check_output(cmd))
+
+    @staticmethod
+    def members(path):
+        members = []
+        metadata = Metadata.metadata(path)
+        for workspace in metadata.get('workspace_members', []):
+            path = re.search(r'\((.*)\)', workspace).group(1)
+            members.append(os.path.join(urlparse(path).path, 'Cargo.toml'))
+        return members
 
     @property
     def all_dependencies(self):
@@ -369,6 +442,71 @@
                     for feature in features)
         return fdeps | deps
 
+    @staticmethod
+    def _match_crate(dependency, metadata):
+        for crate in metadata['resolve']['nodes']:
+            name, version, _ = crate['id'].split()
+            if name != dependency.name:
+                continue
+            v1 = CargoSemVer.parse_version(version)
+            normalized = CargoSemVer(dependency.req).normalized
+            if all(CargoSemVer.eval_(v1, op, v2) for op, v2 in normalized):
+                return crate
+
+    @staticmethod
+    def _find_crate(dependency, metadata):
+        for crate in metadata['resolve']['nodes']:
+            if dependency == crate['id']:
+                return crate
+
+    @staticmethod
+    def _closure(dependencies, metadata):
+        # It is not very clear how to decide, for a workspace, what
+        # features are enabled for a package after resolving all the
+        # dependencies.  We could trace back from the initial set of
+        # dependencies / features to the final set of packages listed
+        # in `cargo metadata`, but this would imply replicating the
+        # resolution algorithm of cargo.
+        #
+        # For now we do a simple closure over all the dependencies
+        # declared in the toml file, using the resolved dependencies
+        # from resolve/nodes/deps in the metadata, and include all
+        # the features enabled for each package.
+        #
+        closure = []
+        # Find the correct version of the initial dependencies
+        for dep in dependencies:
+            crate = Metadata._match_crate(dep, metadata)
+            if not crate:
+                raise ValueError(f'Cannot find crate for {dep}')
+            closure.append(crate)
+
+        # Close over the initial packages
+        for crate in closure:
+            for dep in crate['dependencies']:
+                dep_crate = Metadata._find_crate(dep, metadata)
+                if not dep_crate:
+                    raise ValueError(f'Cannot find crate for {dep}')
+                if dep_crate not in closure:
+                    closure.append(dep_crate)
+
+        # Transform the crate information into a dependency
+        dependencies = []
+        for crate in closure:
+            name, version, _ = crate['id'].split()
+            dependencies.append(Dependency(name, f'={version}',
+                                           crate['features'] or ('default',),
+                                           bundled=True))
+        return dependencies
+
+    def resolved_dependencies(self, feature=None):
+        if not self._path:
+            raise ValueError('Metadata instance without an associated Cargo.toml')
+
+        initial_deps = self._resolve(self.dependencies, feature)[1]
+        metadata = Metadata.metadata(self._path, deps=True)
+        return Metadata._closure(initial_deps, metadata)
+
 
 def normalize_deps(deps):
     return set().union(*(d.normalize() for d in deps))
support virtual manifest

Some Cargo.toml manifest files are only references to workspaces.
Metadata now recognizes those and replaces them with all the
Cargo.toml files that compose the workspace.
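
For example, a minimal sketch of the new behavior (the manifest path
is hypothetical; from_file now returns a list of Metadata instances):

    # A virtual manifest expands into one Metadata per workspace
    # member instead of failing on `cargo read-manifest`.
    packages = Metadata.from_file('/src/project/Cargo.toml',
                                  include_members=True)
    for package in packages:
        print(package._path)  # each member's own Cargo.toml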

find Cargo.toml from binary

We can pass a binary name as a parameter, instead of a Cargo.toml.
cargo-inspector will dig into all the Cargo.toml files in the
workspace (if any), and find the one that declares a binary target
that matches the binary file.
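
The matching logic is roughly the following sketch (not the actual
implementation; it assumes Target keeps the name and kind passed to
its constructor):

    def manifest_for_binary(workspace_toml, binary):
        # Inspect every member manifest and return the one that
        # declares a binary target matching the requested name.
        for package in Metadata.from_file(workspace_toml,
                                          include_members=True):
            if any(target.kind == 'bin' and target.name == binary
                   for target in package.targets):
                return package._path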

add simple version evaluator

Extend the CargoSemVer class to check whether a version matches the
requirements. It will be used in future commits.
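
A minimal usage sketch, mirroring how _match_crate drives it
(parse_version and the (op, version) pairs in `normalized` are the
ones the class already produces):

    # Does version 1.2.0 satisfy the requirement '>=1.0, <2.0'?
    v1 = CargoSemVer.parse_version('1.2.0')
    normalized = CargoSemVer('>=1.0, <2.0').normalized
    ok = all(CargoSemVer.eval_(v1, op, v2) for op, v2 in normalized)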

Add bundled() provider for vendoring

This commit adds basic support for crate vendoring in Rust. The user
runs something like

    cargo vendor

to create a vendor directory (which can later be deployed as a tgz)
that contains all the dependencies.

This patch analyzes the output of

    cargo metadata

to calculate the closure of dependencies and, via the new parameter
--provides-vendor, print all the 'bundled(crate(NAME/FEATURE)) = 0.0.0'
capabilities provided by the binary.

The algorithm is not perfect: today it will include all the features
resolved for the crate (though not all the available ones). Basically
it works like this:

1. A dependency generator macro, cargo_bundled, will call
   cargo-inspector like this:

       # In STDIN we will provide the name of the binary
       cargo-inspector --provides-vendor --path %{_builddir}

2. cargo-inspector will search inside the 'path' tree for a
   Cargo.toml that generates the binary name sent via STDIN.

3. From this point, we go up the tree to find the top-most
   Cargo.toml, as this will be the directory where .cargo/config
   lives. We make this directory our cwd.

4. Using the metadata from `cargo metadata`, we generate the closure
   of dependencies required by this binary. To simplify the problem,
   the current code does not resolve the features, and accepts the
   ones resolved by cargo as valid. Most of the time this will be
   OK; at worst it will include some extra features needed for other
   binaries.

5. Print the 'bundled()' data (see the sketch below).

This code will only be executed if the 'vendor' directory is present
in the top-most directory found in step 3.
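
Putting it together, step 5 amounts to something like this sketch
(the manifest path is hypothetical):

    # Print the bundled() capabilities of one member crate.
    package = Metadata.from_file('/src/project/mybin/Cargo.toml')[0]
    for dep in package.resolved_dependencies():
        # One 'bundled(crate(NAME/FEATURE)) = VERSION' per feature.
        print(dep)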