Add bundled() provider for vendoring

This commit adds basic support for crate vendoring in Rust. The user will
use something like `cargo vendor` to create a vendor directory (that can
later be deployed as a tgz) that contains all the dependencies.

This patch will analyze the output of `cargo manifest` to calculate the
closure of dependencies, and via the new parameter `--provides-vendor`,
print all the 'bundled(crate(NAME/FEATURE)) = 0.0.0' provided by the
binary.

The algorithm is not perfect, as today it will include all the features
resolved for the crate (though not all the available ones), but it
basically works like this:

1.- A dependency generator macro, cargo_bundled, will call
cargo-inspector like this:

  # In STDIN we will provide the name of the binary
  cargo-inspector --provides-vendor --path %{_builddir}

2.- cargo-inspector will search the 'path' tree for a Cargo.toml that
generates the binary name sent via STDIN.

3.- From this point, we go up the tree to find the top-most
Cargo.toml, as its directory is where .cargo/config lives.
We make this directory our `cwd`.

4.- Using the metadata from `cargo manifest`, we generate the closure of
dependencies required by this binary. To simplify the problem, the
current code does not resolve the features itself and accepts the ones
resolved by cargo as valid. Most of the time this will be OK, though it
may include some extra features needed for other binaries.

5.- Print the 'bundled()' data.

This code will only be executed if the directory 'vendor' is present in
the top-most directory found in step 3.
This commit is contained in:
Alberto Planas 2019-12-17 17:22:45 +01:00
parent 3487db7b75
commit 246ee489a0
3 changed files with 108 additions and 8 deletions

3
data/cargo_bundled.attr Normal file
View file

@ -0,0 +1,3 @@
%__cargo_bundled_provides %{_bindir}/cargo-inspector --provides-vendor --path %{_builddir}
%__cargo_bundled_flags exeonly
%__cargo_bundled_magic ^(setuid,? )?(setgid,? )?(sticky )?ELF (32|64)-bit.*executable

View file

@ -29,6 +29,16 @@ def _cargo_toml(source_path, path, exclude_vendor=True):
if binary in _get_binaries(cargo_toml): if binary in _get_binaries(cargo_toml):
return cargo_toml return cargo_toml
raise Exception(f'Cargo.toml not found for binary {binary}')
def _go_to_top_cargo_toml(source_path, path):
"""Find the top Cargo.toml and change directory."""
path = os.path.dirname(path)
while path != source_path:
if os.path.exists(os.path.join(path, 'Cargo.toml')):
os.chdir(path)
path = os.path.dirname(path)
def main(): def main():
@ -43,15 +53,19 @@ def main():
group.add_argument("-R", "--requires", action="store_true", help="Print Requires") group.add_argument("-R", "--requires", action="store_true", help="Print Requires")
group.add_argument("-BR", "--build-requires", action="store_true", help="Print BuildRequires") group.add_argument("-BR", "--build-requires", action="store_true", help="Print BuildRequires")
group.add_argument("-TR", "--test-requires", action="store_true", help="Print TestRequires") group.add_argument("-TR", "--test-requires", action="store_true", help="Print TestRequires")
group.add_argument("-p", "--path", default=os.getcwd(), help="Path where the source project is living") group.add_argument("-PV", "--provides-vendor", action="store_true", help="Print Provides when vendoring")
fgroup = parser.add_mutually_exclusive_group() fgroup = parser.add_mutually_exclusive_group()
fgroup.add_argument("-a", "--all-features", action="store_true", help="Activate all features") fgroup.add_argument("-a", "--all-features", action="store_true", help="Activate all features")
fgroup.add_argument("-f", "--features", default="default", help="Feature(s) to work on (comma-separated)") fgroup.add_argument("-f", "--features", default="default", help="Feature(s) to work on (comma-separated)")
parser.add_argument("--force", action="store_true", help="Force print vendoring provides")
parser.add_argument("-p", "--path", help="Path where the source project is living")
parser.add_argument("file", nargs="*", help="Path(s) to Cargo.toml") parser.add_argument("file", nargs="*", help="Path(s) to Cargo.toml")
args = parser.parse_args() args = parser.parse_args()
args.path = os.path.abspath(args.path) if args.path else os.getcwd()
files = args.file or sys.stdin.readlines() files = args.file or sys.stdin.readlines()
files = [_cargo_toml(args.path, f) for f in files] files = [_cargo_toml(args.path, f.rstrip()) for f in files]
features = set() features = set()
for f in args.features.split(","): for f in args.features.split(","):
@ -93,9 +107,17 @@ def main():
print_deps(md.requires(f, resolve=True)) print_deps(md.requires(f, resolve=True))
if args.test_requires: if args.test_requires:
print_deps(md.dev_dependencies) print_deps(md.dev_dependencies)
if args.provides_vendor:
# Print the vendoring providers only if the 'vendor'
# directory is present
if args.force or os.path.isdir('vendor'):
print_deps(md.resolved_dependencies())
for f in files: for f in files:
f = f.rstrip() # We need to change directory, so cargo can find any local
# configuration file
_go_to_top_cargo_toml(args.path, f)
mds = Metadata.from_file(f) mds = Metadata.from_file(f)
if isinstance(mds, list): if isinstance(mds, list):
for md in mds: for md in mds:

View file

@ -204,11 +204,12 @@ class Target:
class Dependency: class Dependency:
def __init__(self, name, req=None, features=(), optional=False): def __init__(self, name, req=None, features=(), optional=False, bundled=False):
self.name = name self.name = name
self.req = req self.req = req
self.features = features self.features = features
self.optional = optional self.optional = optional
self.bundled = bundled
@classmethod @classmethod
def from_json(cls, metadata): def from_json(cls, metadata):
@ -222,9 +223,11 @@ class Dependency:
return cls(**kwargs) return cls(**kwargs)
@staticmethod @staticmethod
def _apply_reqs(name, reqs, feature=None): def _apply_reqs(name, reqs, feature=None, bundled=False):
fstr = f"/{feature}" if feature is not None else "" fstr = f"/{feature}" if feature is not None else ""
cap = f"crate({name}{fstr})" cap = f"crate({name}{fstr})"
if bundled:
cap = f"bundled({cap})"
if not reqs: if not reqs:
return cap return cap
deps = ' with '.join( deps = ' with '.join(
@ -237,7 +240,8 @@ class Dependency:
def normalize(self): def normalize(self):
semver = CargoSemVer(self.req) semver = CargoSemVer(self.req)
return [self._apply_reqs(self.name, semver.normalized, feature) return [self._apply_reqs(self.name, semver.normalized, feature,
self.bundled)
for feature in self.features or (None,)] for feature in self.features or (None,)]
def __repr__(self): def __repr__(self):
@ -261,6 +265,7 @@ class Metadata:
self.targets = set() self.targets = set()
self.dependencies = {} self.dependencies = {}
self.dev_dependencies = set() self.dev_dependencies = set()
self._path = None
@property @property
def description(self): def description(self):
@ -366,14 +371,19 @@ class Metadata:
@classmethod @classmethod
def from_file(cls, path): def from_file(cls, path):
try: try:
return cls.from_json(Metadata.manifest(path)) instance = cls.from_json(Metadata.manifest(path))
instance._path = path
return instance
except subprocess.CalledProcessError: except subprocess.CalledProcessError:
# If fails, we still check that is a workspace # If fails, we still check that is a workspace
members = Metadata.members(path) members = Metadata.members(path)
if members: if members:
return [ instances = [
cls.from_json(Metadata.manifest(m)) for m in members cls.from_json(Metadata.manifest(m)) for m in members
] ]
for instance in instances:
instance._path = path
return instances
# Is not a workspace? re-raise the exception # Is not a workspace? re-raise the exception
raise raise
@ -436,6 +446,71 @@ class Metadata:
for feature in features) for feature in features)
return fdeps | deps return fdeps | deps
@staticmethod
def _match_crate(dependency, metadata):
    """Return the resolved node whose name and version satisfy
    `dependency`, or None when no node matches.

    NOTE(review): assumes every node id in resolve/nodes looks like
    'NAME VERSION SOURCE' -- confirm against the cargo metadata
    version in use.
    """
    for node in metadata['resolve']['nodes']:
        crate_name, crate_version, _ = node['id'].split()
        if crate_name != dependency.name:
            continue
        # Compare the candidate version against every normalized
        # requirement derived from the dependency specification.
        candidate = CargoSemVer.parse_version(crate_version)
        satisfied = True
        for op, required in CargoSemVer(dependency.req).normalized:
            if not CargoSemVer.eval_(candidate, op, required):
                satisfied = False
                break
        if satisfied:
            return node
@staticmethod
def _find_crate(dependency, metadata):
    """Return the resolved node whose id equals `dependency` (a
    package-id string), or None when it is absent."""
    nodes = metadata['resolve']['nodes']
    return next((node for node in nodes if node['id'] == dependency), None)
@staticmethod
def _closure(dependencies, metadata):
    """Return the transitive closure of `dependencies` over the graph
    resolved in `metadata`, as a list of bundled Dependency objects
    pinned to the exact resolved version and feature set.

    Raises ValueError when a dependency cannot be matched to any
    resolved node.

    It is not very clear how to decide, for a workspace, what
    features are enabled for a package after resolving all the
    dependencies.  We could trace back from the initial set of
    dependencies / features until the final set of packages listed
    in `cargo metadata`, but this would imply replicating the
    resolution algorithm in cargo.

    For now we do a simple closure over all the dependencies
    declared in the toml file, using the resolved dependencies from
    resolve/nodes in the metadata, and include all the features
    enabled for each package.
    """
    closure = []
    # Find the correct version of the initial dependencies.
    for dep in dependencies:
        node = Metadata._match_crate(dep, metadata)
        if not node:
            raise ValueError(f'Cannot find crate for {dep}')
        closure.append(node)
    # Close over the initial packages.  We index explicitly instead
    # of iterating, because items appended during the walk must be
    # visited as well (the original code relied on Python picking up
    # appends while iterating, and also shadowed its loop variable).
    index = 0
    while index < len(closure):
        for dep in closure[index]['dependencies']:
            child = Metadata._find_crate(dep, metadata)
            if not child:
                raise ValueError(f'Cannot find crate for {dep}')
            if child not in closure:
                closure.append(child)
        index += 1
    # Transform the crate information into Dependency objects.
    bundled_deps = []
    for node in closure:
        name, version, _ = node['id'].split()
        bundled_deps.append(Dependency(name, f'={version}',
                                       node['features'] or ('default',),
                                       bundled=True))
    return bundled_deps
def resolved_dependencies(self, feature=None):
    """Return the bundled closure of this crate's dependencies for
    `feature`, computed from the full `cargo metadata` output.

    Requires that this instance was created via from_file(), so that
    the path of the originating Cargo.toml is known.
    """
    if not self._path:
        raise ValueError('Metadata instance without Cargo.toml associated')
    # Element [1] of _resolve() is the initial dependency set for the
    # requested feature; the closure is then taken over the metadata
    # resolved with dependencies included.
    direct_deps = self._resolve(self.dependencies, feature)[1]
    full_metadata = Metadata.metadata(self._path, deps=True)
    return Metadata._closure(direct_deps, full_metadata)
def normalize_deps(deps): def normalize_deps(deps):
return set().union(*(d.normalize() for d in deps)) return set().union(*(d.normalize() for d in deps))