Iliyan Angelov
2025-09-14 23:24:25 +03:00
commit c67067a2a4
71311 changed files with 6800714 additions and 0 deletions

@@ -0,0 +1,27 @@
__all__ = [
"AbstractProvider",
"AbstractResolver",
"BaseReporter",
"InconsistentCandidate",
"RequirementsConflicted",
"ResolutionError",
"ResolutionImpossible",
"ResolutionTooDeep",
"Resolver",
"__version__",
]
__version__ = "1.2.0"
from .providers import AbstractProvider
from .reporters import BaseReporter
from .resolvers import (
AbstractResolver,
InconsistentCandidate,
RequirementsConflicted,
ResolutionError,
ResolutionImpossible,
ResolutionTooDeep,
Resolver,
)
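
For orientation, here is a minimal sketch of how these exports fit together, assuming the package is installed under its upstream import name, ``resolvelib``. The provider below is a deliberately trivial toy (every requirement is a string that is its own only candidate) just to show the wiring of `AbstractProvider`, `BaseReporter`, and `Resolver`:

```python
from resolvelib import AbstractProvider, BaseReporter, Resolver

class TrivialProvider(AbstractProvider):
    """Toy provider: every requirement is a string and is its own only candidate."""

    def identify(self, requirement_or_candidate):
        return requirement_or_candidate

    def get_preference(self, identifier, resolutions, candidates, information, backtrack_causes):
        return identifier  # any sortable value works as a preference key

    def find_matches(self, identifier, requirements, incompatibilities):
        excluded = set(incompatibilities[identifier])
        return [identifier] if identifier not in excluded else []

    def is_satisfied_by(self, requirement, candidate):
        return requirement == candidate

    def get_dependencies(self, candidate):
        return []  # no transitive dependencies in this toy model

result = Resolver(TrivialProvider(), BaseReporter()).resolve(["spam", "eggs"])
print(result.mapping)  # both names pinned to themselves
```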

@@ -0,0 +1,196 @@
from __future__ import annotations
from typing import (
TYPE_CHECKING,
Generic,
Iterable,
Iterator,
Mapping,
Sequence,
)
from .structs import CT, KT, RT, Matches, RequirementInformation
if TYPE_CHECKING:
from typing import Any, Protocol
class Preference(Protocol):
def __lt__(self, __other: Any) -> bool: ...
class AbstractProvider(Generic[RT, CT, KT]):
"""Delegate class to provide the required interface for the resolver."""
def identify(self, requirement_or_candidate: RT | CT) -> KT:
"""Given a requirement or candidate, return an identifier for it.
This is used to identify, e.g., whether two requirements should have
their specifier parts merged, or whether a candidate matches a
requirement via ``find_matches()``.
"""
raise NotImplementedError
def get_preference(
self,
identifier: KT,
resolutions: Mapping[KT, CT],
candidates: Mapping[KT, Iterator[CT]],
information: Mapping[KT, Iterator[RequirementInformation[RT, CT]]],
backtrack_causes: Sequence[RequirementInformation[RT, CT]],
) -> Preference:
"""Produce a sort key for given requirement based on preference.
As this is a sort key, it will be called O(n) times per backtrack
step, where n is the number of `identifier`s. If you have a check
that is expensive in some sense, e.g. it needs to make O(n) checks
per call or takes significant wall clock time, consider using
`narrow_requirement_selection` to filter the `identifier`s; it is
applied before this sort key is called.
The preference is defined as "I think this requirement should be
resolved first". The lower the return value is, the more preferred
this group of arguments is.
:param identifier: An identifier as returned by ``identify()``. This
identifies the requirement being considered.
:param resolutions: Mapping of candidates currently pinned by the
resolver. Each key is an identifier, and the value is a candidate.
The candidate may conflict with requirements from ``information``.
:param candidates: Mapping of each dependency's possible candidates.
Each value is an iterator of candidates.
:param information: Mapping of requirement information of each package.
Each value is an iterator of *requirement information*.
:param backtrack_causes: Sequence of *requirement information* that are
the requirements that caused the resolver to most recently
backtrack.
A *requirement information* instance is a named tuple with two members:
* ``requirement`` specifies a requirement contributing to the current
list of candidates.
* ``parent`` specifies the candidate that provides (depended on) the
requirement, or ``None`` to indicate a root requirement.
The preference could depend on various issues, including (not
necessarily in this order):
* Is this package pinned in the current resolution result?
* How relaxed is the requirement? Stricter ones should probably be
worked on first? (I don't know, actually.)
* How many possibilities are there to satisfy this requirement? Those
with few left should likely be worked on first, I guess?
* Are there any known conflicts for this requirement? We should
probably work on those with the most known conflicts.
A sortable value should be returned (this will be used as the ``key``
parameter of the built-in sorting function). The smaller the value is,
the more preferred this requirement is (i.e. the sorting function
is called with ``reverse=False``).
"""
raise NotImplementedError
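
As an illustration of the sort key described above, here is a hedged sketch of one possible ``get_preference`` for a provider whose identifiers are plain strings. It combines two heuristics the docstring mentions (known conflicts first, then fewest remaining candidates); it is an example, not required behaviour:

```python
# Sketch only: one way to implement the preference key described above.
def get_preference(self, identifier, resolutions, candidates, information, backtrack_causes):
    # Work on identifiers involved in the latest backtrack first.
    involved_in_conflict = any(
        self.identify(cause.requirement) == identifier for cause in backtrack_causes
    )
    # candidates[identifier] is an iterator; counting it costs O(k) per call.
    remaining = sum(1 for _ in candidates[identifier])
    # Tuples sort lexicographically, and smaller sorts first (reverse=False).
    return (not involved_in_conflict, remaining)
```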
def find_matches(
self,
identifier: KT,
requirements: Mapping[KT, Iterator[RT]],
incompatibilities: Mapping[KT, Iterator[CT]],
) -> Matches[CT]:
"""Find all possible candidates that satisfy the given constraints.
:param identifier: An identifier as returned by ``identify()``. All
candidates returned by this method should produce the same
identifier.
:param requirements: A mapping of requirements that all returned
candidates must satisfy. Each key is an identifier, and the value
an iterator of requirements for that dependency.
:param incompatibilities: A mapping of known incompatible candidates of
each dependency. Each key is an identifier, and the value an
iterator of incompatibilities known to the resolver. All
incompatibilities *must* be excluded from the return value.
This should try to get candidates based on the requirements' types.
For VCS, local, and archive requirements, the one-and-only match is
returned, and for a "named" requirement, the index(es) should be
consulted to find concrete candidates for this requirement.
The return value should produce candidates ordered by preference; the
most preferred candidate should come first. The return type may be one
of the following:
* A callable that returns an iterator that yields candidates.
* A collection of candidates.
* An iterable of candidates. This will be consumed immediately into a
list of candidates.
"""
raise NotImplementedError
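
A hedged sketch of ``find_matches`` for a hypothetical provider in which a requirement is a ``(name, allowed_versions)`` pair, a candidate is a ``(name, version)`` pair, and ``self._index`` is a made-up in-memory mapping of names to versions ordered newest first. It returns the callable form described above so candidates are produced lazily:

```python
# Sketch only: the tuple shapes and self._index are illustrative assumptions.
def find_matches(self, identifier, requirements, incompatibilities):
    allowed = [set(versions) for _, versions in requirements[identifier]]
    excluded = {version for _, version in incompatibilities[identifier]}

    def iterator():
        for version in self._index.get(identifier, []):  # newest first
            if version in excluded:
                continue  # incompatibilities *must* be excluded
            if all(version in versions for versions in allowed):
                yield (identifier, version)

    return iterator  # the callable form: candidates are produced lazily
```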
def is_satisfied_by(self, requirement: RT, candidate: CT) -> bool:
"""Whether the given requirement can be satisfied by a candidate.
The candidate is guaranteed to have been generated from the
requirement.
A boolean should be returned to indicate whether ``candidate`` is a
viable solution to the requirement.
"""
raise NotImplementedError
def get_dependencies(self, candidate: CT) -> Iterable[RT]:
"""Get dependencies of a candidate.
This should return a collection of requirements that `candidate`
specifies as its dependencies.
"""
raise NotImplementedError
def narrow_requirement_selection(
self,
identifiers: Iterable[KT],
resolutions: Mapping[KT, CT],
candidates: Mapping[KT, Iterator[CT]],
information: Mapping[KT, Iterator[RequirementInformation[RT, CT]]],
backtrack_causes: Sequence[RequirementInformation[RT, CT]],
) -> Iterable[KT]:
"""
An optional method to narrow the selection of requirements being
considered during resolution. This method is called O(1) times per
backtrack step.
:param identifiers: An iterable of `identifiers` as returned by
``identify()``. These identify all requirements currently being
considered.
:param resolutions: A mapping of candidates currently pinned by the
resolver. Each key is an identifier, and the value is a candidate
that may conflict with requirements from ``information``.
:param candidates: A mapping of each dependency's possible candidates.
Each value is an iterator of candidates.
:param information: A mapping of requirement information for each package.
Each value is an iterator of *requirement information*.
:param backtrack_causes: A sequence of *requirement information* that are
the requirements causing the resolver to most recently
backtrack.
A *requirement information* instance is a named tuple with two members:
* ``requirement`` specifies a requirement contributing to the current
list of candidates.
* ``parent`` specifies the candidate that provides (is depended on for)
the requirement, or ``None`` to indicate a root requirement.
Must return a non-empty subset of `identifiers`, with the default
implementation being to return `identifiers` unchanged. Those `identifiers`
will then be passed to the sort key `get_preference` to pick the most
preferred requirement to attempt to pin, unless `narrow_requirement_selection`
returns only 1 requirement, in which case that will be used without
calling the sort key `get_preference`.
This method is designed to let the provider optimize dependency
resolution: if a check costs O(m) and can be done against all
identifiers at once, filtering the requirement selection here costs
O(m), whereas making it part of the sort key in `get_preference`
costs O(m*n), where n is the number of `identifiers`.
Returns:
Iterable[KT]: A non-empty subset of `identifiers`.
"""
return identifiers
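
A hedged sketch of the optimization this method enables: compute the set of identifiers involved in the most recent backtrack once per call (O(m)), instead of once per identifier inside the ``get_preference`` sort key (O(m*n)):

```python
# Sketch only: restrict preference sorting to the identifiers behind the
# latest backtrack, falling back to the full selection when none match.
def narrow_requirement_selection(self, identifiers, resolutions, candidates, information, backtrack_causes):
    identifiers = list(identifiers)  # may be a one-shot iterable
    causing = {self.identify(cause.requirement) for cause in backtrack_causes}
    narrowed = [identifier for identifier in identifiers if identifier in causing]
    return narrowed or identifiers  # must be a non-empty subset
```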

@@ -0,0 +1,55 @@
from __future__ import annotations
from typing import TYPE_CHECKING, Collection, Generic
from .structs import CT, KT, RT, RequirementInformation, State
if TYPE_CHECKING:
from .resolvers import Criterion
class BaseReporter(Generic[RT, CT, KT]):
"""Delegate class to provide progress reporting for the resolver."""
def starting(self) -> None:
"""Called before the resolution actually starts."""
def starting_round(self, index: int) -> None:
"""Called before each round of resolution starts.
The index is zero-based.
"""
def ending_round(self, index: int, state: State[RT, CT, KT]) -> None:
"""Called before each round of resolution ends.
This is NOT called if the resolution ends at this round. Use `ending`
if you want to report finalization. The index is zero-based.
"""
def ending(self, state: State[RT, CT, KT]) -> None:
"""Called before the resolution ends successfully."""
def adding_requirement(self, requirement: RT, parent: CT | None) -> None:
"""Called when adding a new requirement into the resolve criteria.
:param requirement: The additional requirement to be applied to filter
the available candidates.
:param parent: The candidate that requires ``requirement`` as a
dependency, or None if ``requirement`` is one of the root
requirements passed in from ``Resolver.resolve()``.
"""
def resolving_conflicts(
self, causes: Collection[RequirementInformation[RT, CT]]
) -> None:
"""Called when starting to attempt requirement conflict resolution.
:param causes: The information on the collision that caused the backtracking.
"""
def rejecting_candidate(self, criterion: Criterion[RT, CT], candidate: CT) -> None:
"""Called when rejecting a candidate during backtracking."""
def pinning(self, candidate: CT) -> None:
"""Called when adding a candidate to the potential solution."""

@@ -0,0 +1,27 @@
from ..structs import RequirementInformation
from .abstract import AbstractResolver, Result
from .criterion import Criterion
from .exceptions import (
InconsistentCandidate,
RequirementsConflicted,
ResolutionError,
ResolutionImpossible,
ResolutionTooDeep,
ResolverException,
)
from .resolution import Resolution, Resolver
__all__ = [
"AbstractResolver",
"Criterion",
"InconsistentCandidate",
"RequirementInformation",
"RequirementsConflicted",
"Resolution",
"ResolutionError",
"ResolutionImpossible",
"ResolutionTooDeep",
"Resolver",
"ResolverException",
"Result",
]

@@ -0,0 +1,47 @@
from __future__ import annotations
import collections
from typing import TYPE_CHECKING, Any, Generic, Iterable, Mapping, NamedTuple
from ..structs import CT, KT, RT, DirectedGraph
if TYPE_CHECKING:
from ..providers import AbstractProvider
from ..reporters import BaseReporter
from .criterion import Criterion
class Result(NamedTuple, Generic[RT, CT, KT]):
mapping: Mapping[KT, CT]
graph: DirectedGraph[KT | None]
criteria: Mapping[KT, Criterion[RT, CT]]
else:
Result = collections.namedtuple("Result", ["mapping", "graph", "criteria"])
class AbstractResolver(Generic[RT, CT, KT]):
"""The thing that performs the actual resolution work."""
base_exception = Exception
def __init__(
self,
provider: AbstractProvider[RT, CT, KT],
reporter: BaseReporter[RT, CT, KT],
) -> None:
self.provider = provider
self.reporter = reporter
def resolve(self, requirements: Iterable[RT], **kwargs: Any) -> Result[RT, CT, KT]:
"""Take a collection of constraints, spit out the resolution result.
This returns a representation of the final resolution state, with one
guaranteed attribute ``mapping`` that contains resolved candidates as
values. The keys are their respective identifiers.
:param requirements: A collection of constraints.
:param kwargs: Additional keyword arguments that subclasses may accept.
:raises: ``self.base_exception`` or its subclass.
"""
raise NotImplementedError
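
A small sketch of the shape of the returned ``Result`` (the names and values here are illustrative only); because it is a plain ``namedtuple`` at runtime, both attribute and index access work:

```python
from resolvelib.resolvers import Result
from resolvelib.structs import DirectedGraph

graph = DirectedGraph()
graph.add(None)            # sentinel parent of root requirements
graph.add("spam")
graph.connect(None, "spam")

result = Result(mapping={"spam": ("spam", "1.0")}, graph=graph, criteria={})
assert result.mapping is result[0]
assert "spam" in result.graph
```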

@@ -0,0 +1,48 @@
from __future__ import annotations
from typing import Collection, Generic, Iterable, Iterator
from ..structs import CT, RT, RequirementInformation
class Criterion(Generic[RT, CT]):
"""Representation of possible resolution results of a package.
This holds three attributes:
* `information` is a collection of `RequirementInformation` pairs.
Each pair is a requirement contributing to this criterion, and the
candidate that provides the requirement.
* `incompatibilities` is a collection of all known not-to-work candidates
to exclude from consideration.
* `candidates` is a collection containing all possible candidates deduced
from the union of contributing requirements and known incompatibilities.
It should never be empty, except when the criterion is an attribute of a
raised `RequirementsConflicted` (in which case it is always empty).
.. note::
This class is intended to be externally immutable. **Do not** mutate
any of its attribute containers.
"""
def __init__(
self,
candidates: Iterable[CT],
information: Collection[RequirementInformation[RT, CT]],
incompatibilities: Collection[CT],
) -> None:
self.candidates = candidates
self.information = information
self.incompatibilities = incompatibilities
def __repr__(self) -> str:
requirements = ", ".join(
f"({req!r}, via={parent!r})" for req, parent in self.information
)
return f"Criterion({requirements})"
def iter_requirement(self) -> Iterator[RT]:
return (i.requirement for i in self.information)
def iter_parent(self) -> Iterator[CT | None]:
return (i.parent for i in self.information)
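
A hedged sketch of how the three attributes fit together, using plain strings as stand-in requirements and ``(name, version)`` tuples as stand-in candidates:

```python
from resolvelib.resolvers import Criterion, RequirementInformation

criterion = Criterion(
    candidates=[("spam", "2.0"), ("spam", "1.0")],
    information=[
        RequirementInformation("spam>=1.0", None),            # a root requirement
        RequirementInformation("spam>=2.0", ("ham", "1.0")),  # required by ham 1.0
    ],
    incompatibilities=[("spam", "3.0")],
)
print(list(criterion.iter_requirement()))  # ['spam>=1.0', 'spam>=2.0']
print(list(criterion.iter_parent()))       # [None, ('ham', '1.0')]
```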

@@ -0,0 +1,57 @@
from __future__ import annotations
from typing import TYPE_CHECKING, Collection, Generic
from ..structs import CT, RT, RequirementInformation
if TYPE_CHECKING:
from .criterion import Criterion
class ResolverException(Exception):
"""A base class for all exceptions raised by this module.
Exceptions derived from this class should all be handled in this module.
Any that bubble past the resolver should be treated as a bug.
"""
class RequirementsConflicted(ResolverException, Generic[RT, CT]):
def __init__(self, criterion: Criterion[RT, CT]) -> None:
super().__init__(criterion)
self.criterion = criterion
def __str__(self) -> str:
return "Requirements conflict: {}".format(
", ".join(repr(r) for r in self.criterion.iter_requirement()),
)
class InconsistentCandidate(ResolverException, Generic[RT, CT]):
def __init__(self, candidate: CT, criterion: Criterion[RT, CT]):
super().__init__(candidate, criterion)
self.candidate = candidate
self.criterion = criterion
def __str__(self) -> str:
return "Provided candidate {!r} does not satisfy {}".format(
self.candidate,
", ".join(repr(r) for r in self.criterion.iter_requirement()),
)
class ResolutionError(ResolverException):
pass
class ResolutionImpossible(ResolutionError, Generic[RT, CT]):
def __init__(self, causes: Collection[RequirementInformation[RT, CT]]):
super().__init__(causes)
# causes is a list of RequirementInformation objects
self.causes = causes
class ResolutionTooDeep(ResolutionError):
def __init__(self, round_count: int) -> None:
super().__init__(round_count)
self.round_count = round_count
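
A hedged sketch of handling these two resolution failures from calling code; ``resolver`` and ``requirements`` are assumed to exist already:

```python
from resolvelib import ResolutionImpossible, ResolutionTooDeep

try:
    result = resolver.resolve(requirements, max_rounds=200)
except ResolutionImpossible as exc:
    # Each cause is a RequirementInformation(requirement, parent) pair.
    for cause in exc.causes:
        print(f"cannot satisfy {cause.requirement!r} (required by {cause.parent!r})")
except ResolutionTooDeep as exc:
    print(f"gave up after {exc.round_count} rounds")
```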

@@ -0,0 +1,622 @@
from __future__ import annotations
import collections
import itertools
import operator
from typing import TYPE_CHECKING, Generic
from ..structs import (
CT,
KT,
RT,
DirectedGraph,
IterableView,
IteratorMapping,
RequirementInformation,
State,
build_iter_view,
)
from .abstract import AbstractResolver, Result
from .criterion import Criterion
from .exceptions import (
InconsistentCandidate,
RequirementsConflicted,
ResolutionImpossible,
ResolutionTooDeep,
ResolverException,
)
if TYPE_CHECKING:
from collections.abc import Collection, Iterable, Mapping
from ..providers import AbstractProvider, Preference
from ..reporters import BaseReporter
_OPTIMISTIC_BACKJUMPING_RATIO: float = 0.1
def _build_result(state: State[RT, CT, KT]) -> Result[RT, CT, KT]:
mapping = state.mapping
all_keys: dict[int, KT | None] = {id(v): k for k, v in mapping.items()}
all_keys[id(None)] = None
graph: DirectedGraph[KT | None] = DirectedGraph()
graph.add(None) # Sentinel as root dependencies' parent.
connected: set[KT | None] = {None}
for key, criterion in state.criteria.items():
if not _has_route_to_root(state.criteria, key, all_keys, connected):
continue
if key not in graph:
graph.add(key)
for p in criterion.iter_parent():
try:
pkey = all_keys[id(p)]
except KeyError:
continue
if pkey not in graph:
graph.add(pkey)
graph.connect(pkey, key)
return Result(
mapping={k: v for k, v in mapping.items() if k in connected},
graph=graph,
criteria=state.criteria,
)
class Resolution(Generic[RT, CT, KT]):
"""Stateful resolution object.
This is designed as a one-off object that holds information to kick start
the resolution process, and holds the results afterwards.
"""
def __init__(
self,
provider: AbstractProvider[RT, CT, KT],
reporter: BaseReporter[RT, CT, KT],
) -> None:
self._p = provider
self._r = reporter
self._states: list[State[RT, CT, KT]] = []
# Optimistic backjumping variables
self._optimistic_backjumping_ratio = _OPTIMISTIC_BACKJUMPING_RATIO
self._save_states: list[State[RT, CT, KT]] | None = None
self._optimistic_start_round: int | None = None
@property
def state(self) -> State[RT, CT, KT]:
try:
return self._states[-1]
except IndexError as e:
raise AttributeError("state") from e
def _push_new_state(self) -> None:
"""Push a new state into history.
This new state will be used to hold resolution results of the next
coming round.
"""
base = self._states[-1]
state = State(
mapping=base.mapping.copy(),
criteria=base.criteria.copy(),
backtrack_causes=base.backtrack_causes[:],
)
self._states.append(state)
def _add_to_criteria(
self,
criteria: dict[KT, Criterion[RT, CT]],
requirement: RT,
parent: CT | None,
) -> None:
self._r.adding_requirement(requirement=requirement, parent=parent)
identifier = self._p.identify(requirement_or_candidate=requirement)
criterion = criteria.get(identifier)
if criterion:
incompatibilities = list(criterion.incompatibilities)
else:
incompatibilities = []
matches = self._p.find_matches(
identifier=identifier,
requirements=IteratorMapping(
criteria,
operator.methodcaller("iter_requirement"),
{identifier: [requirement]},
),
incompatibilities=IteratorMapping(
criteria,
operator.attrgetter("incompatibilities"),
{identifier: incompatibilities},
),
)
if criterion:
information = list(criterion.information)
information.append(RequirementInformation(requirement, parent))
else:
information = [RequirementInformation(requirement, parent)]
criterion = Criterion(
candidates=build_iter_view(matches),
information=information,
incompatibilities=incompatibilities,
)
if not criterion.candidates:
raise RequirementsConflicted(criterion)
criteria[identifier] = criterion
def _remove_information_from_criteria(
self, criteria: dict[KT, Criterion[RT, CT]], parents: Collection[KT]
) -> None:
"""Remove information from parents of criteria.
Concretely, removes all values from each criterion's ``information``
field that have one of ``parents`` as provider of the requirement.
:param criteria: The criteria to update.
:param parents: Identifiers for which to remove information from all criteria.
"""
if not parents:
return
for key, criterion in criteria.items():
criteria[key] = Criterion(
criterion.candidates,
[
information
for information in criterion.information
if (
information.parent is None
or self._p.identify(information.parent) not in parents
)
],
criterion.incompatibilities,
)
def _get_preference(self, name: KT) -> Preference:
return self._p.get_preference(
identifier=name,
resolutions=self.state.mapping,
candidates=IteratorMapping(
self.state.criteria,
operator.attrgetter("candidates"),
),
information=IteratorMapping(
self.state.criteria,
operator.attrgetter("information"),
),
backtrack_causes=self.state.backtrack_causes,
)
def _is_current_pin_satisfying(
self, name: KT, criterion: Criterion[RT, CT]
) -> bool:
try:
current_pin = self.state.mapping[name]
except KeyError:
return False
return all(
self._p.is_satisfied_by(requirement=r, candidate=current_pin)
for r in criterion.iter_requirement()
)
def _get_updated_criteria(self, candidate: CT) -> dict[KT, Criterion[RT, CT]]:
criteria = self.state.criteria.copy()
for requirement in self._p.get_dependencies(candidate=candidate):
self._add_to_criteria(criteria, requirement, parent=candidate)
return criteria
def _attempt_to_pin_criterion(self, name: KT) -> list[Criterion[RT, CT]]:
criterion = self.state.criteria[name]
causes: list[Criterion[RT, CT]] = []
for candidate in criterion.candidates:
try:
criteria = self._get_updated_criteria(candidate)
except RequirementsConflicted as e:
self._r.rejecting_candidate(e.criterion, candidate)
causes.append(e.criterion)
continue
# Check the newly-pinned candidate actually works. This should
# always pass under normal circumstances, but in the case of a
# faulty provider, we will raise an error to notify the implementer
# to fix find_matches() and/or is_satisfied_by().
satisfied = all(
self._p.is_satisfied_by(requirement=r, candidate=candidate)
for r in criterion.iter_requirement()
)
if not satisfied:
raise InconsistentCandidate(candidate, criterion)
self._r.pinning(candidate=candidate)
self.state.criteria.update(criteria)
# Put newly-pinned candidate at the end. This is essential because
# backtracking looks at this mapping to get the last pin.
self.state.mapping.pop(name, None)
self.state.mapping[name] = candidate
return []
# All candidates tried, nothing works. This criterion is a dead
# end, signal for backtracking.
return causes
def _patch_criteria(
self, incompatibilities_from_broken: list[tuple[KT, list[CT]]]
) -> bool:
# Create a new state from the last known-to-work one, and apply
# the previously gathered incompatibility information.
for k, incompatibilities in incompatibilities_from_broken:
if not incompatibilities:
continue
try:
criterion = self.state.criteria[k]
except KeyError:
continue
matches = self._p.find_matches(
identifier=k,
requirements=IteratorMapping(
self.state.criteria,
operator.methodcaller("iter_requirement"),
),
incompatibilities=IteratorMapping(
self.state.criteria,
operator.attrgetter("incompatibilities"),
{k: incompatibilities},
),
)
candidates: IterableView[CT] = build_iter_view(matches)
if not candidates:
return False
incompatibilities.extend(criterion.incompatibilities)
self.state.criteria[k] = Criterion(
candidates=candidates,
information=list(criterion.information),
incompatibilities=incompatibilities,
)
return True
def _save_state(self) -> None:
"""Save states for potential rollback if optimistic backjumping fails."""
if self._save_states is None:
self._save_states = [
State(
mapping=s.mapping.copy(),
criteria=s.criteria.copy(),
backtrack_causes=s.backtrack_causes[:],
)
for s in self._states
]
def _rollback_states(self) -> None:
"""Rollback states and disable optimistic backjumping."""
self._optimistic_backjumping_ratio = 0.0
if self._save_states:
self._states = self._save_states
self._save_states = None
def _backjump(self, causes: list[RequirementInformation[RT, CT]]) -> bool:
"""Perform backjumping.
When we enter here, the stack is like this::
[ state Z ]
[ state Y ]
[ state X ]
.... earlier states are irrelevant.
1. No pins worked for Z, so it does not have a pin.
2. We want to reset state Y to unpinned, and pin another candidate.
3. State X holds what state Y was before the pin, but does not
have the incompatibility information gathered in state Y.
Each iteration of the loop will:
1. Identify Z. The incompatibility is not always caused by the latest
state. For example, given three requirements A, B and C, with
dependencies A1, B1 and C1, where A1 and B1 are incompatible: the
last state might be related to C, so we want to discard the
previous state.
2. Discard Z.
3. Discard Y but remember its incompatibility information gathered
previously, and the failure we're dealing with right now.
4. Push a new state Y' based on X, and apply the incompatibility
information from Y to Y'.
5a. If this causes Y' to conflict, we need to backtrack again. Make Y'
the new Z and go back to step 2.
5b. If the incompatibilities apply cleanly, end backtracking.
"""
incompatible_reqs: Iterable[CT | RT] = itertools.chain(
(c.parent for c in causes if c.parent is not None),
(c.requirement for c in causes),
)
incompatible_deps = {self._p.identify(r) for r in incompatible_reqs}
while len(self._states) >= 3:
# Remove the state that triggered backtracking.
del self._states[-1]
# Optimistically backtrack to a state that caused the incompatibility
broken_state = self.state
while True:
# Retrieve the last candidate pin and known incompatibilities.
try:
broken_state = self._states.pop()
name, candidate = broken_state.mapping.popitem()
except (IndexError, KeyError):
raise ResolutionImpossible(causes) from None
if (
not self._optimistic_backjumping_ratio
and name not in incompatible_deps
):
# For safe backjumping only backjump if the current dependency
# is not the same as the incompatible dependency
break
# The first time a non-safe backjump is performed, the state is
# saved so we can restore it later if the resolution fails
if (
self._optimistic_backjumping_ratio
and self._save_states is None
and name not in incompatible_deps
):
self._save_state()
# If the current dependencies and the incompatible dependencies
# are overlapping then we have likely found a cause of the
# incompatibility
current_dependencies = {
self._p.identify(d) for d in self._p.get_dependencies(candidate)
}
if not current_dependencies.isdisjoint(incompatible_deps):
break
# Fallback: We should not backtrack to the point where
# broken_state.mapping is empty, so stop backtracking for
# a chance for the resolution to recover
if not broken_state.mapping:
break
incompatibilities_from_broken = [
(k, list(v.incompatibilities)) for k, v in broken_state.criteria.items()
]
# Also mark the newly known incompatibility.
incompatibilities_from_broken.append((name, [candidate]))
self._push_new_state()
success = self._patch_criteria(incompatibilities_from_broken)
# It works! Let's work on this new state.
if success:
return True
# State does not work after applying known incompatibilities.
# Try the state before that one.
# No way to backtrack anymore.
return False
def _extract_causes(
self, criteron: list[Criterion[RT, CT]]
) -> list[RequirementInformation[RT, CT]]:
"""Extract causes from list of criterion and deduplicate"""
return list({id(i): i for c in criteron for i in c.information}.values())
def resolve(self, requirements: Iterable[RT], max_rounds: int) -> State[RT, CT, KT]:
if self._states:
raise RuntimeError("already resolved")
self._r.starting()
# Initialize the root state.
self._states = [
State(
mapping=collections.OrderedDict(),
criteria={},
backtrack_causes=[],
)
]
for r in requirements:
try:
self._add_to_criteria(self.state.criteria, r, parent=None)
except RequirementsConflicted as e:
raise ResolutionImpossible(e.criterion.information) from e
# The root state is saved as a sentinel so the first ever pin can have
# something to backtrack to if it fails. The root state is basically
# pinning the virtual "root" package in the graph.
self._push_new_state()
# Variables for optimistic backjumping
optimistic_rounds_cutoff: int | None = None
optimistic_backjumping_start_round: int | None = None
for round_index in range(max_rounds):
self._r.starting_round(index=round_index)
# Handle if optimistic backjumping has been running for too long
if self._optimistic_backjumping_ratio and self._save_states is not None:
if optimistic_backjumping_start_round is None:
optimistic_backjumping_start_round = round_index
optimistic_rounds_cutoff = int(
(max_rounds - round_index) * self._optimistic_backjumping_ratio
)
if optimistic_rounds_cutoff <= 0:
self._rollback_states()
continue
elif optimistic_rounds_cutoff is not None:
if (
round_index - optimistic_backjumping_start_round
>= optimistic_rounds_cutoff
):
self._rollback_states()
continue
unsatisfied_names = [
key
for key, criterion in self.state.criteria.items()
if not self._is_current_pin_satisfying(key, criterion)
]
# All criteria are accounted for. Nothing more to pin, we are done!
if not unsatisfied_names:
self._r.ending(state=self.state)
return self.state
# keep track of satisfied names to calculate diff after pinning
satisfied_names = set(self.state.criteria.keys()) - set(unsatisfied_names)
if len(unsatisfied_names) > 1:
narrowed_unstatisfied_names = list(
self._p.narrow_requirement_selection(
identifiers=unsatisfied_names,
resolutions=self.state.mapping,
candidates=IteratorMapping(
self.state.criteria,
operator.attrgetter("candidates"),
),
information=IteratorMapping(
self.state.criteria,
operator.attrgetter("information"),
),
backtrack_causes=self.state.backtrack_causes,
)
)
else:
narrowed_unstatisfied_names = unsatisfied_names
# narrow_requirement_selection must return a non-empty subset
if not narrowed_unstatisfied_names:
raise RuntimeError("narrow_requirement_selection returned 0 names")
# If there is only 1 unsatisfied name skip calling self._get_preference
if len(narrowed_unstatisfied_names) > 1:
# Choose the most preferred unpinned criterion to try.
name = min(narrowed_unstatisfied_names, key=self._get_preference)
else:
name = narrowed_unstatisfied_names[0]
failure_criterion = self._attempt_to_pin_criterion(name)
if failure_criterion:
causes = self._extract_causes(failure_criterion)
# Backjump if pinning fails. The backjump process puts us in
# an unpinned state, so we can work on it in the next round.
self._r.resolving_conflicts(causes=causes)
try:
success = self._backjump(causes)
except ResolutionImpossible:
if self._optimistic_backjumping_ratio and self._save_states:
failed_optimistic_backjumping = True
else:
raise
else:
failed_optimistic_backjumping = bool(
not success
and self._optimistic_backjumping_ratio
and self._save_states
)
if failed_optimistic_backjumping and self._save_states:
self._rollback_states()
else:
self.state.backtrack_causes[:] = causes
# Dead ends everywhere. Give up.
if not success:
raise ResolutionImpossible(self.state.backtrack_causes)
else:
# discard as information sources any invalidated names
# (unsatisfied names that were previously satisfied)
newly_unsatisfied_names = {
key
for key, criterion in self.state.criteria.items()
if key in satisfied_names
and not self._is_current_pin_satisfying(key, criterion)
}
self._remove_information_from_criteria(
self.state.criteria, newly_unsatisfied_names
)
# Pinning was successful. Push a new state to do another pin.
self._push_new_state()
self._r.ending_round(index=round_index, state=self.state)
raise ResolutionTooDeep(max_rounds)
class Resolver(AbstractResolver[RT, CT, KT]):
"""The thing that performs the actual resolution work."""
base_exception = ResolverException
def resolve( # type: ignore[override]
self,
requirements: Iterable[RT],
max_rounds: int = 100,
) -> Result[RT, CT, KT]:
"""Take a collection of constraints, spit out the resolution result.
The return value is a representation of the final resolution result. It
is a tuple subclass with three public members:
* `mapping`: A dict of resolved candidates. Each key is an identifier
of a requirement (as returned by the provider's `identify` method),
and the value is the resolved candidate.
* `graph`: A `DirectedGraph` instance representing the dependency tree.
The vertices are keys of `mapping`, and each edge represents *why*
a particular package is included. A special vertex `None` is
included to represent parents of user-supplied requirements.
* `criteria`: A dict of "criteria" that hold detailed information on
how edges in the graph are derived. Each key is an identifier of a
requirement, and the value is a `Criterion` instance.
The following exceptions may be raised if a resolution cannot be found:
* `ResolutionImpossible`: A resolution cannot be found for the given
combination of requirements. The `causes` attribute of the
exception is a list of (requirement, parent), giving the
requirements that could not be satisfied.
* `ResolutionTooDeep`: The dependency tree is too deeply nested and
the resolver gave up. This is usually caused by a circular
dependency, but you can try to resolve this by increasing the
`max_rounds` argument.
"""
resolution = Resolution(self.provider, self.reporter)
state = resolution.resolve(requirements, max_rounds=max_rounds)
return _build_result(state)
def _has_route_to_root(
criteria: Mapping[KT, Criterion[RT, CT]],
key: KT | None,
all_keys: dict[int, KT | None],
connected: set[KT | None],
) -> bool:
if key in connected:
return True
if key not in criteria:
return False
assert key is not None
for p in criteria[key].iter_parent():
try:
pkey = all_keys[id(p)]
except KeyError:
continue
if pkey in connected:
connected.add(key)
return True
if _has_route_to_root(criteria, pkey, all_keys, connected):
connected.add(key)
return True
return False
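
To make the ``mapping``/``graph``/``criteria`` description above concrete, here is a self-contained toy run; the index, package names, and tuple shapes are illustrative assumptions, not part of the library:

```python
from resolvelib import AbstractProvider, BaseReporter, Resolver

# Made-up index: name -> {version: [dependency requirements]}.
INDEX = {
    "app": {"1.0": [("lib", {"1.0", "2.0"})]},
    "lib": {"2.0": [("base", {"1.0"})], "1.0": []},
    "base": {"1.0": []},
}

class ToyProvider(AbstractProvider):
    """Requirements are (name, allowed_versions); candidates are (name, version)."""

    def identify(self, requirement_or_candidate):
        return requirement_or_candidate[0]

    def get_preference(self, identifier, resolutions, candidates, information, backtrack_causes):
        return sum(1 for _ in candidates[identifier])  # fewest candidates first

    def find_matches(self, identifier, requirements, incompatibilities):
        allowed = [set(versions) for _, versions in requirements[identifier]]
        excluded = {version for _, version in incompatibilities[identifier]}
        return [
            (identifier, version)
            for version in sorted(INDEX.get(identifier, {}), reverse=True)  # newest first
            if version not in excluded and all(version in a for a in allowed)
        ]

    def is_satisfied_by(self, requirement, candidate):
        name, versions = requirement
        return candidate[0] == name and candidate[1] in versions

    def get_dependencies(self, candidate):
        name, version = candidate
        return INDEX[name][version]

result = Resolver(ToyProvider(), BaseReporter()).resolve([("app", {"1.0"})])
print(result.mapping)  # {'app': ('app', '1.0'), 'lib': ('lib', '2.0'), 'base': ('base', '1.0')}
for parent, child in result.graph.iter_edges():
    print(parent, "->", child)  # None -> app, app -> lib, lib -> base
```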

@@ -0,0 +1,209 @@
from __future__ import annotations
import itertools
from collections import namedtuple
from typing import (
TYPE_CHECKING,
Callable,
Generic,
Iterable,
Iterator,
Mapping,
NamedTuple,
Sequence,
TypeVar,
Union,
)
KT = TypeVar("KT") # Identifier.
RT = TypeVar("RT") # Requirement.
CT = TypeVar("CT") # Candidate.
Matches = Union[Iterable[CT], Callable[[], Iterable[CT]]]
if TYPE_CHECKING:
from .resolvers.criterion import Criterion
class RequirementInformation(NamedTuple, Generic[RT, CT]):
requirement: RT
parent: CT | None
class State(NamedTuple, Generic[RT, CT, KT]):
"""Resolution state in a round."""
mapping: dict[KT, CT]
criteria: dict[KT, Criterion[RT, CT]]
backtrack_causes: list[RequirementInformation[RT, CT]]
else:
RequirementInformation = namedtuple(
"RequirementInformation", ["requirement", "parent"]
)
State = namedtuple("State", ["mapping", "criteria", "backtrack_causes"])
class DirectedGraph(Generic[KT]):
"""A graph structure with directed edges."""
def __init__(self) -> None:
self._vertices: set[KT] = set()
self._forwards: dict[KT, set[KT]] = {} # <key> -> Set[<key>]
self._backwards: dict[KT, set[KT]] = {} # <key> -> Set[<key>]
def __iter__(self) -> Iterator[KT]:
return iter(self._vertices)
def __len__(self) -> int:
return len(self._vertices)
def __contains__(self, key: KT) -> bool:
return key in self._vertices
def copy(self) -> DirectedGraph[KT]:
"""Return a shallow copy of this graph."""
other = type(self)()
other._vertices = set(self._vertices)
other._forwards = {k: set(v) for k, v in self._forwards.items()}
other._backwards = {k: set(v) for k, v in self._backwards.items()}
return other
def add(self, key: KT) -> None:
"""Add a new vertex to the graph."""
if key in self._vertices:
raise ValueError("vertex exists")
self._vertices.add(key)
self._forwards[key] = set()
self._backwards[key] = set()
def remove(self, key: KT) -> None:
"""Remove a vertex from the graph, disconnecting all edges from/to it."""
self._vertices.remove(key)
for f in self._forwards.pop(key):
self._backwards[f].remove(key)
for t in self._backwards.pop(key):
self._forwards[t].remove(key)
def connected(self, f: KT, t: KT) -> bool:
return f in self._backwards[t] and t in self._forwards[f]
def connect(self, f: KT, t: KT) -> None:
"""Connect two existing vertices.
Nothing happens if the vertices are already connected.
"""
if t not in self._vertices:
raise KeyError(t)
self._forwards[f].add(t)
self._backwards[t].add(f)
def iter_edges(self) -> Iterator[tuple[KT, KT]]:
for f, children in self._forwards.items():
for t in children:
yield f, t
def iter_children(self, key: KT) -> Iterator[KT]:
return iter(self._forwards[key])
def iter_parents(self, key: KT) -> Iterator[KT]:
return iter(self._backwards[key])
class IteratorMapping(Mapping[KT, Iterator[CT]], Generic[RT, CT, KT]):
def __init__(
self,
mapping: Mapping[KT, RT],
accessor: Callable[[RT], Iterable[CT]],
appends: Mapping[KT, Iterable[CT]] | None = None,
) -> None:
self._mapping = mapping
self._accessor = accessor
self._appends: Mapping[KT, Iterable[CT]] = appends or {}
def __repr__(self) -> str:
return "IteratorMapping({!r}, {!r}, {!r})".format(
self._mapping,
self._accessor,
self._appends,
)
def __bool__(self) -> bool:
return bool(self._mapping or self._appends)
def __contains__(self, key: object) -> bool:
return key in self._mapping or key in self._appends
def __getitem__(self, k: KT) -> Iterator[CT]:
try:
v = self._mapping[k]
except KeyError:
return iter(self._appends[k])
return itertools.chain(self._accessor(v), self._appends.get(k, ()))
def __iter__(self) -> Iterator[KT]:
more = (k for k in self._appends if k not in self._mapping)
return itertools.chain(self._mapping, more)
def __len__(self) -> int:
more = sum(1 for k in self._appends if k not in self._mapping)
return len(self._mapping) + more
class _FactoryIterableView(Iterable[RT]):
"""Wrap an iterator factory returned by `find_matches()`.
Calling `iter()` on this class would invoke the underlying iterator
factory, making it a "collection with ordering" that can be iterated
through multiple times, but lacks random access methods presented in
built-in Python sequence types.
"""
def __init__(self, factory: Callable[[], Iterable[RT]]) -> None:
self._factory = factory
self._iterable: Iterable[RT] | None = None
def __repr__(self) -> str:
return f"{type(self).__name__}({list(self)})"
def __bool__(self) -> bool:
try:
next(iter(self))
except StopIteration:
return False
return True
def __iter__(self) -> Iterator[RT]:
iterable = self._factory() if self._iterable is None else self._iterable
self._iterable, current = itertools.tee(iterable)
return current
class _SequenceIterableView(Iterable[RT]):
"""Wrap an iterable returned by find_matches().
This is essentially just a proxy to the underlying sequence that provides
the same interface as `_FactoryIterableView`.
"""
def __init__(self, sequence: Sequence[RT]):
self._sequence = sequence
def __repr__(self) -> str:
return f"{type(self).__name__}({self._sequence})"
def __bool__(self) -> bool:
return bool(self._sequence)
def __iter__(self) -> Iterator[RT]:
return iter(self._sequence)
def build_iter_view(matches: Matches[CT]) -> Iterable[CT]:
"""Build an iterable view from the value returned by `find_matches()`."""
if callable(matches):
return _FactoryIterableView(matches)
if not isinstance(matches, Sequence):
matches = list(matches)
return _SequenceIterableView(matches)
IterableView = Iterable
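
A short sketch exercising the two public helpers defined above; the keys and values are arbitrary:

```python
from resolvelib.structs import DirectedGraph, build_iter_view

graph = DirectedGraph()
for key in (None, "app", "lib"):
    graph.add(key)
graph.connect(None, "app")
graph.connect("app", "lib")
print(list(graph.iter_children("app")))  # ['lib']
print(list(graph.iter_parents("lib")))   # ['app']

# The factory form stays lazy but can be iterated (and truth-tested) repeatedly.
view = build_iter_view(lambda: iter([1, 2, 3]))
print(bool(view), list(view), list(view))  # True [1, 2, 3] [1, 2, 3]
```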