Remove duplicated output at the end

This commit is contained in:
Alberto Planas 2021-03-26 14:15:46 +01:00
parent 9269105f5b
commit 981aeed93d
2 changed files with 30 additions and 20 deletions

View file

@@ -4,7 +4,7 @@ import os
import sys
from . import Metadata
from .metadata import normalize_deps
from .metadata import normalize_deps, Dependency
def _get_binaries(cargo_toml):
@@ -81,49 +81,59 @@ def main():
features.add(f or None)
def process_metadata(md):
data = set()
data = []
if args.name:
data.add(md.name)
data.append(md.name)
if args.version:
data.add(md._version)
data.append(md._version)
if args.rpm_version:
data.add(md.version)
data.append(md.version)
if args.target_kinds:
data.update(tgt.kind for tgt in md.targets)
data.extend(tgt.kind for tgt in md.targets)
if args.list_features:
data.update(f for f in md.dependencies if f is not None)
data.extend(f for f in md.dependencies if f is not None)
if args.provides:
data.update(md.provides(f) for f in features)
data.extend(md.provides(f) for f in features)
if args.requires:
# Someone should own /usr/share/cargo/registry
data.add('cargo')
data.append('cargo')
if args.all_features:
data.update(md.all_dependencies)
data.extend(md.all_dependencies)
else:
for f in features:
data.update(md.requires(f))
data.extend(md.requires(f))
if args.build_requires:
data.add("rust-packaging")
data.append("rust-packaging")
if args.all_features:
data.update(md.all_dependencies)
data.extend(md.all_dependencies)
else:
for f in features:
data.update(md.requires(f, resolve=True))
data.extend(md.requires(f, resolve=True))
if args.test_requires:
data.update(md.dev_dependencies)
data.extend(md.dev_dependencies)
if args.provides_vendor:
# Print the vendoring providers only if the 'vendor'
# directory is present
if args.vendor or os.path.isdir('vendor'):
data.update(md.resolved_dependencies())
data.extend(md.resolved_dependencies())
return data
for f in files:
data = set()
mds = Metadata.from_file(f, include_members=args.include_workspaces)
for md in mds:
data.update(process_metadata(md))
for line in data:
# process_metadata can return a list of strings, but it can
# also contain Dependency instances that, once rendered,
# expand to multiple substrings. If we want to sort the data
# and remove all duplicates we should first normalize it
metadata_lines = []
for metadata in process_metadata(md):
if isinstance(metadata, Dependency):
metadata_lines.extend(metadata.normalize())
else:
metadata_lines.append(metadata)
data.update(metadata_lines)
for line in sorted(data):
print(line)

View file

@@ -371,9 +371,9 @@ class Metadata:
return self
@classmethod
def from_file(cls, path):
def from_file(cls, path, include_members=False):
instances = []
members = Metadata.members(path)
members = Metadata.members(path) if include_members else []
for member in (members or [path]):
instance = cls.from_json(Metadata.manifest(member))
instance._path = member