resolvelib: Stop resolving requirements one-by-one #488

Merged
merged 22 commits on Jan 28, 2023
Changes from 19 commits

Commits (22)
b7d46c8
resolvelib: Stop resolving requirements one-by-one
tetsuo-cpp Jan 17, 2023
90e17cf
requirement, resolvelib: Propagate origin requirements for each candi…
tetsuo-cpp Jan 18, 2023
20d16ee
requirement: Use the new origin requirements field
tetsuo-cpp Jan 18, 2023
1f7b15d
Merge branch 'main' into alex/dep-resolution-fix
tetsuo-cpp Jan 18, 2023
8e65f89
Merge remote-tracking branch 'origin/main' into alex/dep-resolution-fix
tetsuo-cpp Jan 25, 2023
9517b8d
requirement: Fix `--skip-editable` flag
tetsuo-cpp Jan 25, 2023
7556eb6
Merge remote-tracking branch 'origin/alex/pip-requirements-parser-fix…
tetsuo-cpp Jan 25, 2023
3e396a6
resolvelib: Fix lint
tetsuo-cpp Jan 25, 2023
74cde16
Merge remote-tracking branch 'origin/main' into alex/dep-resolution-fix
tetsuo-cpp Jan 26, 2023
d547d05
test: Remove incorrect online annotation
tetsuo-cpp Jan 26, 2023
5eeef40
requirement, resolvelib: Rename dependee map variables/functions
tetsuo-cpp Jan 26, 2023
4935dd0
test: Get a few resolvelib tests working
tetsuo-cpp Jan 26, 2023
92bc864
resolvelib: Return a dummy candidate when a project can't be found on…
tetsuo-cpp Jan 27, 2023
9dd0fbf
test: Get tests passing
tetsuo-cpp Jan 27, 2023
f0d6b5d
requirement, resolvelib: Fix lint
tetsuo-cpp Jan 27, 2023
894168e
resolvelib: Document new classes
tetsuo-cpp Jan 27, 2023
e16fcd9
Merge branch 'main' into alex/dep-resolution-fix
tetsuo-cpp Jan 27, 2023
2912a2f
CHANGELOG: Add changelog entry
tetsuo-cpp Jan 27, 2023
8709ef6
dependency_source: Mark the interface with "no cover"
tetsuo-cpp Jan 27, 2023
2e4079b
test: Fill out remaining coverage
tetsuo-cpp Jan 28, 2023
a2d8cf3
Merge branch 'main' into alex/dep-resolution-fix
tetsuo-cpp Jan 28, 2023
cecbb6c
requirement: Use `pass` statement to check for coverage and mark
tetsuo-cpp Jan 28, 2023
CHANGELOG.md: 4 additions & 0 deletions
@@ -17,6 +17,10 @@ All versions prior to 0.0.9 are untracked.
with an internal API change
([#499](https://github.com/pypa/pip-audit/pull/499))

* Fixed a dependency resolution bug that can potentially be triggered when
multiple packages have the same subdependency
([#488](https://github.com/pypa/pip-audit/pull/488))

## [2.4.14]

### Fixed
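
To illustrate the class of bug this changelog entry describes (hypothetical package names, not code from this PR): when requirements are resolved one at a time, each resolution sees only that requirement's constraint on a shared subdependency, so two resolutions can pin it to different versions. Resolving the whole list at once has to satisfy the intersection of the constraints instead. A minimal sketch with `packaging`:

```python
# Minimal sketch using `packaging` (hypothetical package names, not pip-audit code).
from packaging.requirements import Requirement
from packaging.specifiers import SpecifierSet


def combined_constraint(reqs: list[Requirement], name: str) -> SpecifierSet:
    """Intersect every requirement's constraint on the subdependency `name`."""
    combined = SpecifierSet()
    for req in reqs:
        if req.name == name:
            combined &= req.specifier
    return combined


# Two dependees that both constrain "shared-lib", but differently.
dependee_reqs = [Requirement("shared-lib<2.0"), Requirement("shared-lib>=1.5")]

# Resolving one requirement at a time could pick 1.0 for the first and 2.1 for
# the second; resolving them together must satisfy the intersection (>=1.5,<2.0).
print(combined_constraint(dependee_reqs, "shared-lib"))
```
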
pip_audit/_dependency_source/interface.py: 4 additions & 16 deletions
@@ -157,34 +157,22 @@ def supported_algorithms(self, req_name: str) -> list[str]:

class DependencyResolver(ABC):
"""
Represents an abstract resolver of Python dependencies that takes a single
dependency and returns all of its transitive dependencies.
Represents an abstract resolver of Python dependencies that takes a list of
dependencies and returns all of their transitive dependencies.

Concrete dependency sources may use a resolver as part of their
implementation.
"""

@abstractmethod
def resolve(
self, req: Requirement, req_hashes: RequirementHashes
self, reqs: list[Requirement], req_hashes: RequirementHashes
) -> list[Dependency]: # pragma: no cover
"""
Resolve a single `Requirement` into a list of `Dependency` instances.
Resolve a list of `Requirement`s into a list of resolved `Dependency`s.
"""
raise NotImplementedError

def resolve_all(
self, reqs: Iterator[Requirement], req_hashes: RequirementHashes
) -> Iterator[tuple[Requirement, list[Dependency]]]:
"""
Resolve a collection of `Requirement`s into their respective `Dependency` sets.

`DependencyResolver` implementations can override this implementation with
a more optimized one.
"""
for req in reqs:
yield (req, self.resolve(req, req_hashes))


class DependencyResolverError(Exception):
"""
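
To make the new interface shape concrete, here is a rough sketch of a toy resolver and its caller (simplified, hypothetical types; the real `resolve()` also takes a `RequirementHashes` argument and returns `Dependency` objects):

```python
# Sketch of the new call shape (toy types, not the real pip-audit classes).
from packaging.requirements import Requirement


class ToyResolver:
    """Stand-in for a concrete DependencyResolver with the list-based signature."""

    def resolve(self, reqs: list[Requirement]) -> list[str]:
        # A real implementation would run one resolution over *all* of `reqs`
        # and return Dependency objects; here we just echo the names.
        return sorted({req.name for req in reqs})


resolver = ToyResolver()
reqs = [Requirement("pkg-a==1.0"), Requirement("pkg-b==2.0")]

# Old interface: one resolve() call per requirement, stitched together by
# resolve_all(). New interface: a single call that sees every top-level
# requirement, so shared subdependencies get one consistent pin.
print(resolver.resolve(reqs))
```
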
pip_audit/_dependency_source/pyproject.py: 14 additions & 15 deletions
@@ -77,21 +77,20 @@ def collect(self) -> Iterator[Dependency]:
reqs: list[Requirement] = [Requirement(dep) for dep in deps]
req_hashes = RequirementHashes()
try:
for _, deps in self.resolver.resolve_all(iter(reqs), req_hashes):
for dep in deps:
# Don't allow duplicate dependencies to be returned
if dep in collected:
continue

if dep.is_skipped(): # pragma: no cover
dep = cast(SkippedDependency, dep)
self.state.update_state(f"Skipping {dep.name}: {dep.skip_reason}")
else:
dep = cast(ResolvedDependency, dep)
self.state.update_state(f"Collecting {dep.name} ({dep.version})")

collected.add(dep)
yield dep
for dep in self.resolver.resolve(reqs, req_hashes):
# Don't allow duplicate dependencies to be returned
if dep in collected:
continue

if dep.is_skipped(): # pragma: no cover
dep = cast(SkippedDependency, dep)
self.state.update_state(f"Skipping {dep.name}: {dep.skip_reason}")
else:
dep = cast(ResolvedDependency, dep)
self.state.update_state(f"Collecting {dep.name} ({dep.version})")

collected.add(dep)
yield dep
except DependencyResolverError as dre:
raise PyProjectSourceError("dependency resolver raised an error") from dre

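
The duplicate filtering in `collect()` above is the usual seen-set pattern; a standalone sketch of the idea (hypothetical data, not pip-audit code):

```python
# Sketch only: yield each resolved item once, even if it is reported for
# several top-level requirements.
from typing import Iterable, Iterator


def dedupe(items: Iterable[str]) -> Iterator[str]:
    seen: set[str] = set()
    for item in items:
        if item in seen:
            continue
        seen.add(item)
        yield item


print(list(dedupe(["requests", "idna", "urllib3", "idna"])))
```
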
pip_audit/_dependency_source/requirement.py: 59 additions & 39 deletions
@@ -9,6 +9,7 @@
import re
import shutil
from contextlib import ExitStack
from dataclasses import dataclass, field
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import IO, Iterator, cast
@@ -76,15 +77,15 @@ def __init__(
self._no_deps = no_deps
self._skip_editable = skip_editable
self.state = state
self._dep_cache: dict[Path, dict[Requirement, set[Dependency]]] = {}
self._dep_cache: dict[Path, set[Dependency]] = {}

def collect(self) -> Iterator[Dependency]:
"""
Collect all of the dependencies discovered by this `RequirementSource`.

Raises a `RequirementSourceError` on any errors.
"""
collected: set[Dependency] = set()
collected: dict[str, Dependency] = dict()
for filename in self._filenames:
try:
rf = RequirementsFile.from_file(filename)
@@ -123,14 +124,34 @@ def collect(self) -> Iterator[Dependency]:
req_names.add(req.name)
reqs.append(req)

for _, dep in self._collect_cached_deps(filename, reqs):
if dep in collected:
for dep in self._collect_cached_deps(filename, reqs):
if dep.canonical_name in collected:
existing_dep = collected[dep.canonical_name]
if isinstance(dep, SkippedDependency) or isinstance(
existing_dep, SkippedDependency
):
continue

dep = cast(RequirementDependency, dep)
existing_dep = cast(RequirementDependency, existing_dep)

# If we have the same dependency generated from multiple files, we need to
# merge the dependee requirements.
combined_dep = RequirementDependency(
name=dep.name,
version=dep.version,
dependee_reqs=(dep.dependee_reqs | existing_dep.dependee_reqs),
)

collected[dep.canonical_name] = combined_dep
continue
collected.add(dep)
yield dep

collected[dep.canonical_name] = dep
except DependencyResolverError as dre:
raise RequirementSourceError(str(dre))

yield from collected.values()

def fix(self, fix_version: ResolvedFixVersion) -> None:
"""
Fixes a dependency version for this `RequirementSource`.
@@ -211,23 +232,20 @@ def _fix_file(self, filename: Path, fix_version: ResolvedFixVersion) -> None:
# another.
try:
if not fixed:
installed_reqs: list[InstallRequirement] = [
r for r in reqs if isinstance(r, InstallRequirement)
]
origin_reqs: set[Requirement] = set()
for req, dep in self._collect_cached_deps(filename, list(installed_reqs)):
if fix_version.dep == dep:
origin_reqs.add(req)
if origin_reqs:
req_dep = cast(RequirementDependency, fix_version.dep)
if req_dep.dependee_reqs:
logger.warning(
"added fixed subdependency explicitly to requirements file "
f"{filename}: {fix_version.dep.canonical_name}"
)
origin_reqs_formatted = ",".join(
[str(req) for req in sorted(list(origin_reqs), key=lambda x: x.name)]
dependee_reqs_formatted = ",".join(
[
str(req)
for req in sorted(list(req_dep.dependee_reqs), key=lambda x: x.name)
]
)
print(
f" # pip-audit: subdependency fixed via {origin_reqs_formatted}",
f" # pip-audit: subdependency fixed via {dependee_reqs_formatted}",
file=f,
)
print(f"{fix_version.dep.canonical_name}=={fix_version.version}", file=f)
@@ -274,7 +292,7 @@ def _collect_preresolved_deps(
f"requirement {req.name} is not pinned to an exact version: {str(req)}"
)

yield req.req, ResolvedDependency(
yield req.req, RequirementDependency(
req.name, Version(pinned_specifier.group("version"))
)

@@ -293,27 +311,23 @@ def _build_hash_options_mapping(self, hash_options: list[str]) -> dict[str, list

def _collect_cached_deps(
self, filename: Path, reqs: list[InstallRequirement]
) -> Iterator[tuple[Requirement, Dependency]]:
) -> Iterator[Dependency]:
"""
Collect resolved dependencies for a given requirements file, retrieving them from the
dependency cache if possible.
"""
# See if we've already have cached dependencies for this file
cached_deps_for_file = self._dep_cache.get(filename, None)
if cached_deps_for_file is not None:
for req, deps in cached_deps_for_file.items():
for dep in deps:
yield req, dep
yield from cached_deps_for_file

new_cached_deps_for_file: dict[Requirement, set[Dependency]] = dict()
new_cached_deps_for_file: set[Dependency] = set()

# Skip dependency resolution if the user has specified `--no-deps`
if self._no_deps:
for req, dep in self._collect_preresolved_deps(iter(reqs)):
if req not in new_cached_deps_for_file:
new_cached_deps_for_file[req] = set()
new_cached_deps_for_file[req].add(dep)
yield req, dep
new_cached_deps_for_file.add(dep)
yield dep
else:
require_hashes = self._require_hashes or any(req.hash_options for req in reqs)
req_hashes = RequirementHashes()
@@ -331,20 +345,17 @@ def _collect_cached_deps(

# Invoke the dependency resolver to turn requirements into dependencies
req_values: list[Requirement] = [r.req for r in reqs]
for req, resolved_deps in self._resolver.resolve_all(iter(req_values), req_hashes):
for dep in resolved_deps:
if req not in new_cached_deps_for_file:
new_cached_deps_for_file[req] = set()
new_cached_deps_for_file[req].add(dep)
for dep in self._resolver.resolve(req_values, req_hashes):
new_cached_deps_for_file.add(dep)

if dep.is_skipped(): # pragma: no cover
dep = cast(SkippedDependency, dep)
self.state.update_state(f"Skipping {dep.name}: {dep.skip_reason}")
else:
dep = cast(ResolvedDependency, dep)
self.state.update_state(f"Collecting {dep.name} ({dep.version})")
if dep.is_skipped(): # pragma: no cover
dep = cast(SkippedDependency, dep)
self.state.update_state(f"Skipping {dep.name}: {dep.skip_reason}")
else:
dep = cast(ResolvedDependency, dep)
self.state.update_state(f"Collecting {dep.name} ({dep.version})")

yield req, dep
yield dep

# Cache the collected dependencies
self._dep_cache[filename] = new_cached_deps_for_file
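
The per-file cache in `_collect_cached_deps` amounts to memoising the resolver's output keyed by the requirements file path; a simplified sketch of the same pattern (hypothetical names, not the real implementation):

```python
# Sketch: memoise an expensive per-file computation, as _collect_cached_deps
# does for resolved dependencies (simplified types).
from pathlib import Path
from typing import Callable, Iterator


class FileCache:
    def __init__(self, compute: Callable[[Path], set[str]]) -> None:
        self._compute = compute
        self._cache: dict[Path, set[str]] = {}

    def get(self, filename: Path) -> Iterator[str]:
        cached = self._cache.get(filename)
        if cached is not None:
            yield from cached  # serve previously resolved results
            return
        result = self._compute(filename)  # the expensive step (resolution)
        self._cache[filename] = result
        yield from result


cache = FileCache(lambda p: {f"dep-from-{p.name}"})
list(cache.get(Path("requirements.txt")))  # computed on first use
list(cache.get(Path("requirements.txt")))  # served from the cache
```
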
@@ -360,3 +371,12 @@ class RequirementFixError(DependencyFixError):
"""A requirements-fixing specific `DependencyFixError`."""

pass


@dataclass(frozen=True)
class RequirementDependency(ResolvedDependency):
"""
Represents a fully resolved Python package from a requirements file.
"""

dependee_reqs: set[Requirement] = field(default_factory=set, hash=False)
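
A short sketch of the dataclass pattern used here (a toy stand-in, not the real `RequirementDependency`): the set-valued `dependee_reqs` field is excluded from hashing, so the frozen dataclass still hashes on name and version, and two records for the same package can be merged by unioning their dependee requirements, as `collect()` does above:

```python
# Toy stand-in for the RequirementDependency pattern (not the real class).
from dataclasses import dataclass, field
from packaging.requirements import Requirement


@dataclass(frozen=True)
class Dep:
    name: str
    version: str
    # Excluded from hashing, so the frozen dataclass still hashes on
    # (name, version) even though this field is a mutable set.
    dependee_reqs: set[Requirement] = field(default_factory=set, hash=False)


a = Dep("shared-lib", "1.9", {Requirement("pkg-a==1.0")})
b = Dep("shared-lib", "1.9", {Requirement("pkg-b==2.0")})

# The same package reported from two requirements files: keep a single entry
# and union the requirements that pulled it in.
merged = Dep(a.name, a.version, a.dependee_reqs | b.dependee_reqs)
print(hash(a) == hash(b), sorted(str(r) for r in merged.dependee_reqs))
```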