login page

Alicja Cięciwa
2020-10-27 12:57:58 +01:00
commit cb8886666c
8545 changed files with 1082463 additions and 0 deletions


@@ -0,0 +1,214 @@
# -*- coding=utf-8 -*-
from __future__ import absolute_import, unicode_literals
import copy
import hashlib
import json
import os
import sys
import appdirs
import pip_shims
import requests
import vistir
from ..internals._pip_shims import VCS_SUPPORT
from ..internals.utils import get_pinned_version
CACHE_DIR = os.environ.get("PASSA_CACHE_DIR", appdirs.user_cache_dir("passa"))
class HashCache(pip_shims.SafeFileCache):
"""Caches hashes of PyPI artifacts so we do not need to re-download them.
Hashes are only cached when the URL appears to contain a hash in it, and the
cache key includes the hash value returned from the server. This ought to
avoid issues where the location on the server changes.
"""
def __init__(self, *args, **kwargs):
session = kwargs.pop('session', requests.session())
self.session = session
kwargs.setdefault('directory', os.path.join(CACHE_DIR, 'hash-cache'))
super(HashCache, self).__init__(*args, **kwargs)
def get_hash(self, location):
# If there is no hash in the location (e.g., md5, sha256), we don't want
# to store it.
hash_value = None
orig_scheme = location.scheme
new_location = copy.deepcopy(location)
if orig_scheme in VCS_SUPPORT.all_schemes:
new_location.url = new_location.url.split("+", 1)[-1]
can_hash = new_location.hash
if can_hash:
# hash url WITH fragment
hash_value = self.get(new_location.url)
if not hash_value:
hash_value = self._get_file_hash(new_location)
hash_value = hash_value.encode('utf8')
if can_hash:
self.set(new_location.url, hash_value)
return hash_value.decode('utf8')
def _get_file_hash(self, location):
h = hashlib.new(pip_shims.FAVORITE_HASH)
with vistir.open_file(location, self.session) as fp:
for chunk in iter(lambda: fp.read(8096), b""):
h.update(chunk)
return ":".join([h.name, h.hexdigest()])
# pip-tools's dependency cache implementation.
class CorruptCacheError(Exception):
def __init__(self, path):
self.path = path
def __str__(self):
lines = [
'The dependency cache seems to have been corrupted.',
'Inspect, or delete, the following file:',
' {}'.format(self.path),
]
return os.linesep.join(lines)
def _key_from_req(req):
"""Get an all-lowercase version of the requirement's name."""
if hasattr(req, 'key'):
# from pkg_resources, such as installed dists for pip-sync
key = req.key
else:
# from packaging, such as install requirements from requirements.txt
key = req.name
key = key.replace('_', '-').lower()
return key
def _read_cache_file(cache_file_path):
with open(cache_file_path, 'r') as cache_file:
try:
doc = json.load(cache_file)
except ValueError:
raise CorruptCacheError(cache_file_path)
# Check version and load the contents
assert doc['__format__'] == 1, 'Unknown cache file format'
return doc['dependencies']
class _JSONCache(object):
"""A persistent cache backed by a JSON file.
The cache file is written to the appropriate user cache dir for the
current platform, e.g.
~/.cache/passa/depcache-pyX.Y.json
where X.Y indicates the Python version.
"""
filename_format = None
def __init__(self, cache_dir=CACHE_DIR):
vistir.mkdir_p(cache_dir)
python_version = ".".join(str(digit) for digit in sys.version_info[:2])
cache_filename = self.filename_format.format(
python_version=python_version,
)
self._cache_file = os.path.join(cache_dir, cache_filename)
self._cache = None
@property
def cache(self):
"""The dictionary that is the actual in-memory cache.
This property lazily loads the cache from disk.
"""
if self._cache is None:
self.read_cache()
return self._cache
def as_cache_key(self, ireq):
"""Given a requirement, return its cache key.
This behavior is a little weird in order to allow backwards
compatibility with cache files. For a requirement without extras, this
will return, for example::
("ipython", "2.1.0")
For a requirement with extras, the extras will be comma-separated and
appended to the version, inside brackets, like so::
("ipython", "2.1.0[nbconvert,notebook]")
"""
extras = tuple(sorted(ireq.extras))
if not extras:
extras_string = ""
else:
extras_string = "[{}]".format(",".join(extras))
name = _key_from_req(ireq.req)
version = get_pinned_version(ireq)
return name, "{}{}".format(version, extras_string)
def read_cache(self):
"""Reads the cached contents into memory.
"""
if os.path.exists(self._cache_file):
self._cache = _read_cache_file(self._cache_file)
else:
self._cache = {}
def write_cache(self):
"""Writes the cache to disk as JSON.
"""
doc = {
'__format__': 1,
'dependencies': self._cache,
}
with open(self._cache_file, 'w') as f:
json.dump(doc, f, sort_keys=True)
def clear(self):
self._cache = {}
self.write_cache()
def __contains__(self, ireq):
pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
return pkgversion_and_extras in self.cache.get(pkgname, {})
def __getitem__(self, ireq):
pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
return self.cache[pkgname][pkgversion_and_extras]
def __setitem__(self, ireq, values):
pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
self.cache.setdefault(pkgname, {})
self.cache[pkgname][pkgversion_and_extras] = values
self.write_cache()
def __delitem__(self, ireq):
pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
try:
del self.cache[pkgname][pkgversion_and_extras]
except KeyError:
return
self.write_cache()
def get(self, ireq, default=None):
pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
return self.cache.get(pkgname, {}).get(pkgversion_and_extras, default)
class DependencyCache(_JSONCache):
"""Cache the dependency of cancidates.
"""
filename_format = "depcache-py{python_version}.json"
class RequiresPythonCache(_JSONCache):
"""Cache a candidate's Requires-Python information.
"""
filename_format = "pyreqcache-py{python_version}.json"


@@ -0,0 +1,214 @@
# -*- coding=utf-8 -*-
from __future__ import absolute_import, unicode_literals
import itertools
import resolvelib
import plette
import requirementslib
import vistir
from ..internals.hashes import get_hashes
from ..internals.reporters import StdOutReporter
from ..internals.traces import trace_graph
from ..internals.utils import identify_requirment
from .caches import HashCache
from .metadata import set_metadata
from .providers import BasicProvider, EagerUpgradeProvider, PinReuseProvider
def _get_requirements(model, section_name):
"""Produce a mapping of identifier: requirement from the section.
"""
if not model:
return {}
return {identify_requirment(r): r for r in (
requirementslib.Requirement.from_pipfile(name, package._data)
for name, package in model.get(section_name, {}).items()
)}
def _get_requires_python(pipfile):
try:
requires = pipfile.requires
except AttributeError:
return ""
try:
return requires.python_full_version
except AttributeError:
pass
try:
return requires.python_version
except AttributeError:
return ""
def _collect_derived_entries(state, traces, identifiers):
"""Produce a mapping containing all candidates derived from `identifiers`.
`identifiers` should provide a collection of requirement identifications
from a section (i.e. `packages` or `dev-packages`). This function uses
`traces` to pick out candidates in the state that are present because of
an entry in that collection.
"""
identifiers = set(identifiers)
if not identifiers:
return {}
entries = {}
extras = {}
for identifier, requirement in state.mapping.items():
routes = {trace[1] for trace in traces[identifier] if len(trace) > 1}
if identifier not in identifiers and not (identifiers & routes):
continue
name = requirement.normalized_name
if requirement.extras:
# Aggregate extras from multiple routes so we can produce their
# union in the lock file. (sarugaku/passa#24)
try:
extras[name].extend(requirement.extras)
except KeyError:
extras[name] = list(requirement.extras)
entries[name] = next(iter(requirement.as_pipfile().values()))
for name, ext in extras.items():
entries[name]["extras"] = ext
return entries
class AbstractLocker(object):
"""Helper class to produce a new lock file for a project.
This is not intended for instantiation. You should use one of its concrete
subclasses instead. The class contains logic to:
* Prepare a project for locking
* Perform the actual resolver invocation
* Convert resolver output into lock file format
* Update the project to have the new lock file
"""
def __init__(self, project):
self.project = project
self.default_requirements = _get_requirements(
project.pipfile, "packages",
)
self.develop_requirements = _get_requirements(
project.pipfile, "dev-packages",
)
# This comprehension dance ensures we merge packages from both
# sections, and definitions in the default section win.
self.requirements = {k: r for k, r in itertools.chain(
self.develop_requirements.items(),
self.default_requirements.items(),
)}.values()
self.sources = [s._data.copy() for s in project.pipfile.sources]
self.allow_prereleases = bool(
project.pipfile.get("pipenv", {}).get("allow_prereleases", False),
)
self.requires_python = _get_requires_python(project.pipfile)
def __repr__(self):
return "<{0} @ {1!r}>".format(type(self).__name__, self.project.root)
def get_provider(self):
raise NotImplementedError
def get_reporter(self):
# TODO: Build SpinnerReporter, and use this only in verbose mode.
return StdOutReporter(self.requirements)
def lock(self):
"""Lock specified (abstract) requirements into (concrete) candidates.
The locking procedure consists of four stages:
* Resolve versions and dependency graph (powered by ResolveLib).
* Walk the graph to determine "why" each candidate came to be, i.e.
what top-level requirements result in a given candidate.
* Populate hashes for resolved candidates.
* Populate markers based on dependency specifications of each
candidate, and the dependency graph.
"""
provider = self.get_provider()
reporter = self.get_reporter()
resolver = resolvelib.Resolver(provider, reporter)
with vistir.cd(self.project.root):
state = resolver.resolve(self.requirements)
traces = trace_graph(state.graph)
hash_cache = HashCache()
for r in state.mapping.values():
if not r.hashes:
r.hashes = get_hashes(hash_cache, r)
set_metadata(
state.mapping, traces,
provider.fetched_dependencies,
provider.collected_requires_pythons,
)
lockfile = plette.Lockfile.with_meta_from(self.project.pipfile)
lockfile["default"] = _collect_derived_entries(
state, traces, self.default_requirements,
)
lockfile["develop"] = _collect_derived_entries(
state, traces, self.develop_requirements,
)
self.project.lockfile = lockfile
class BasicLocker(AbstractLocker):
"""Basic concrete locker.
This takes a project, generates a lock file from its Pipfile, and sets
the lock file property on the project.
"""
def get_provider(self):
return BasicProvider(
self.requirements, self.sources,
self.requires_python, self.allow_prereleases,
)
class PinReuseLocker(AbstractLocker):
"""A specialized locker to handle re-locking based on existing pins.
See :class:`.providers.PinReuseProvider` for more information.
"""
def __init__(self, project):
super(PinReuseLocker, self).__init__(project)
pins = _get_requirements(project.lockfile, "develop")
pins.update(_get_requirements(project.lockfile, "default"))
for pin in pins.values():
pin.markers = None
self.preferred_pins = pins
def get_provider(self):
return PinReuseProvider(
self.preferred_pins, self.requirements, self.sources,
self.requires_python, self.allow_prereleases,
)
class EagerUpgradeLocker(PinReuseLocker):
"""A specialized locker to handle the "eager" upgrade strategy.
See :class:`.providers.EagerUpgradeProvider` for more
information.
"""
def __init__(self, tracked_names, *args, **kwargs):
super(EagerUpgradeLocker, self).__init__(*args, **kwargs)
self.tracked_names = tracked_names
def get_provider(self):
return EagerUpgradeProvider(
self.tracked_names, self.preferred_pins,
self.requirements, self.sources,
self.requires_python, self.allow_prereleases,
)
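# A minimal sketch of driving a locker, assuming the Project class from the projects
# module in this commit and a hypothetical project path containing a Pipfile.
#
#     project = Project("/path/to/project")
#     BasicLocker(project).lock()      # runs the four stages described in lock()
#     # project.lockfile now holds the freshly resolved plette.Lockfile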


@@ -0,0 +1,169 @@
# -*- coding=utf-8 -*-
from __future__ import absolute_import, unicode_literals
import copy
import itertools
import packaging.markers
import packaging.specifiers
import vistir
import vistir.misc
from ..internals.markers import get_without_extra
from ..internals.specifiers import cleanup_pyspecs, pyspec_from_markers
def dedup_markers(s):
# TODO: Implement better logic.
deduped = sorted(vistir.misc.dedup(s))
return deduped
class MetaSet(object):
"""Representation of a "metadata set".
This holds multiple metadata representations. Each metadata representation
includes a marker, and a specifier set of Python versions required.
"""
def __init__(self):
self.markerset = frozenset()
self.pyspecset = packaging.specifiers.SpecifierSet()
def __repr__(self):
return "MetaSet(markerset={0!r}, pyspecset={1!r})".format(
",".join(sorted(self.markerset)), str(self.pyspecset),
)
def __str__(self):
pyspecs = set()
markerset = set()
for m in self.markerset:
marker_specs = pyspec_from_markers(packaging.markers.Marker(m))
if marker_specs:
pyspecs.add(marker_specs)
else:
markerset.add(m)
if pyspecs:
self.pyspecset._specs &= pyspecs
self.markerset = frozenset(markerset)
return " and ".join(dedup_markers(itertools.chain(
# Make sure to always use the same quotes so we can dedup properly.
(
"{0}".format(ms) if " or " in ms else ms
for ms in (str(m).replace('"', "'") for m in self.markerset)
),
(
"python_version {0[0]} '{0[1]}'".format(spec)
for spec in cleanup_pyspecs(self.pyspecset)
),
)))
def __bool__(self):
return bool(self.markerset or self.pyspecset)
def __nonzero__(self): # Python 2.
return self.__bool__()
def __or__(self, pair):
marker, specset = pair
markerset = set(self.markerset)
if marker:
marker_specs = pyspec_from_markers(marker)
if not marker_specs:
markerset.add(str(marker))
else:
specset._specs &= marker_specs
metaset = MetaSet()
metaset.markerset = frozenset(markerset)
# TODO: Implement some logic to clean up dups like '3.0.*' and '3.0'.
metaset.pyspecset &= self.pyspecset & specset
return metaset
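# A small worked sketch of combining a MetaSet with a (marker, specifier set) pair,
# assuming pyspec_from_markers() leaves the os_name marker untranslated.
#
#     ms = MetaSet() | (packaging.markers.Marker("os_name == 'nt'"),
#                       packaging.specifiers.SpecifierSet(">=3.6"))
#     str(ms)   # roughly "os_name == 'nt' and python_version >= '3.6'"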
def _build_metasets(dependencies, pythons, key, trace, all_metasets):
all_parent_metasets = []
for route in trace:
parent = route[-1]
try:
parent_metasets = all_metasets[parent]
except KeyError: # Parent not calculated yet. Wait for it.
return
all_parent_metasets.append((parent, parent_metasets))
metaset_iters = []
for parent, parent_metasets in all_parent_metasets:
r = dependencies[parent][key]
python = pythons[key]
metaset = (
get_without_extra(r.markers),
packaging.specifiers.SpecifierSet(python),
)
metaset_iters.append(
parent_metaset | metaset
for parent_metaset in parent_metasets
)
return list(itertools.chain.from_iterable(metaset_iters))
def _calculate_metasets_mapping(dependencies, pythons, traces):
all_metasets = {None: [MetaSet()]}
del traces[None]
while traces:
new_metasets = {}
for key, trace in traces.items():
assert key not in all_metasets, key # Sanity check for debug.
metasets = _build_metasets(
dependencies, pythons, key, trace, all_metasets,
)
if metasets is None:
continue
new_metasets[key] = metasets
if not new_metasets:
break # No progress? Deadlocked. Give up.
all_metasets.update(new_metasets)
for key in new_metasets:
del traces[key]
return all_metasets
def _format_metasets(metasets):
# If there is an unconditional route, this needs to be unconditional.
if not metasets or not all(metasets):
return None
# This extra str(Marker()) call helps simplify the expression.
return str(packaging.markers.Marker(" or ".join(
"{0}".format(s) if " and " in s else s
for s in dedup_markers(str(metaset) for metaset in metasets
if metaset)
)))
def set_metadata(candidates, traces, dependencies, pythons):
"""Add "metadata" to candidates based on the dependency tree.
Metadata for a candidate includes markers and a specifier for Python
version requirements.
:param candidates: A key-candidate mapping. Candidates in the mapping will
have their markers set.
:param traces: A graph trace (produced by `traces.trace_graph`) providing
information about dependency relationships between candidates.
:param dependencies: A key-collection mapping containing what dependencies
each candidate in `candidates` requested.
:param pythons: A key-str mapping containing Requires-Python information
of each candidate.
Keys in mappings and entries in the trace are identifiers of a package, as
implemented by the `identify` method of the resolver's provider.
The candidates are modified in-place.
"""
metasets_mapping = _calculate_metasets_mapping(
dependencies, pythons, copy.deepcopy(traces),
)
for key, candidate in candidates.items():
candidate.markers = _format_metasets(metasets_mapping[key])


@@ -0,0 +1,241 @@
# -*- coding=utf-8 -*-
from __future__ import absolute_import, unicode_literals
import collections
import io
import os
from pipenv.vendor import attr
import packaging.markers
import packaging.utils
import plette
import plette.models
import six
import tomlkit
SectionDifference = collections.namedtuple("SectionDifference", [
"inthis", "inthat",
])
FileDifference = collections.namedtuple("FileDifference", [
"default", "develop",
])
def _are_pipfile_entries_equal(a, b):
a = {k: v for k, v in a.items() if k not in ("markers", "hashes", "hash")}
b = {k: v for k, v in b.items() if k not in ("markers", "hashes", "hash")}
if a != b:
return False
try:
marker_eval_a = packaging.markers.Marker(a["markers"]).evaluate()
except (AttributeError, KeyError, TypeError, ValueError):
marker_eval_a = True
try:
marker_eval_b = packaging.markers.Marker(b["markers"]).evaluate()
except (AttributeError, KeyError, TypeError, ValueError):
marker_eval_b = True
return marker_eval_a == marker_eval_b
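# A small illustration with hypothetical entries: hashes (and markers that evaluate
# the same) are ignored, so these two compare equal.
#
#     _are_pipfile_entries_equal({"version": "==1.0", "hashes": ["sha256:aaa"]},
#                                {"version": "==1.0"})   # True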
DEFAULT_NEWLINES = "\n"
def preferred_newlines(f):
if isinstance(f.newlines, six.text_type):
return f.newlines
return DEFAULT_NEWLINES
@attr.s
class ProjectFile(object):
"""A file in the Pipfile project.
"""
location = attr.ib()
line_ending = attr.ib()
model = attr.ib()
@classmethod
def read(cls, location, model_cls, invalid_ok=False):
try:
with io.open(location, encoding="utf-8") as f:
model = model_cls.load(f)
line_ending = preferred_newlines(f)
except Exception:
if not invalid_ok:
raise
model = None
line_ending = DEFAULT_NEWLINES
return cls(location=location, line_ending=line_ending, model=model)
def write(self):
kwargs = {"encoding": "utf-8", "newline": self.line_ending}
with io.open(self.location, "w", **kwargs) as f:
self.model.dump(f)
def dumps(self):
strio = six.StringIO()
self.model.dump(strio)
return strio.getvalue()
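# A minimal sketch of round-tripping a Pipfile through ProjectFile, assuming the
# path below exists.
#
#     pf = ProjectFile.read("/path/to/Pipfile", plette.Pipfile)
#     print(pf.dumps())   # TOML text of the parsed model
#     pf.write()          # re-serialize with the original line endings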
@attr.s
class Project(object):
root = attr.ib()
_p = attr.ib(init=False)
_l = attr.ib(init=False)
def __attrs_post_init__(self):
self.root = root = os.path.abspath(self.root)
self._p = ProjectFile.read(
os.path.join(root, "Pipfile"),
plette.Pipfile,
)
self._l = ProjectFile.read(
os.path.join(root, "Pipfile.lock"),
plette.Lockfile,
invalid_ok=True,
)
@property
def pipfile(self):
return self._p.model
@property
def pipfile_location(self):
return self._p.location
@property
def lockfile(self):
return self._l.model
@property
def lockfile_location(self):
return self._l.location
@lockfile.setter
def lockfile(self, new):
self._l.model = new
def is_synced(self):
return self.lockfile and self.lockfile.is_up_to_date(self.pipfile)
def _get_pipfile_section(self, develop, insert=True):
name = "dev-packages" if develop else "packages"
try:
section = self.pipfile[name]
except KeyError:
section = plette.models.PackageCollection(tomlkit.table())
if insert:
self.pipfile[name] = section
return section
def contains_key_in_pipfile(self, key):
sections = [
self._get_pipfile_section(develop=False, insert=False),
self._get_pipfile_section(develop=True, insert=False),
]
return any(
(packaging.utils.canonicalize_name(name) ==
packaging.utils.canonicalize_name(key))
for section in sections
for name in section
)
def add_line_to_pipfile(self, line, develop):
from requirementslib import Requirement
requirement = Requirement.from_line(line)
section = self._get_pipfile_section(develop=develop)
key = requirement.normalized_name
entry = next(iter(requirement.as_pipfile().values()))
if isinstance(entry, dict):
# HACK: TOMLKit prefers to expand tables by default, but we
# always want inline tables here. Also tomlkit.inline_table
# does not have `update()`.
table = tomlkit.inline_table()
for k, v in entry.items():
table[k] = v
entry = table
section[key] = entry
def remove_keys_from_pipfile(self, keys, default, develop):
keys = {packaging.utils.canonicalize_name(key) for key in keys}
sections = []
if default:
sections.append(self._get_pipfile_section(
develop=False, insert=False,
))
if develop:
sections.append(self._get_pipfile_section(
develop=True, insert=False,
))
for section in sections:
removals = set()
for name in section:
if packaging.utils.canonicalize_name(name) in keys:
removals.add(name)
for key in removals:
del section._data[key]
def remove_keys_from_lockfile(self, keys):
keys = {packaging.utils.canonicalize_name(key) for key in keys}
removed = False
for section_name in ("default", "develop"):
try:
section = self.lockfile[section_name]
except KeyError:
continue
removals = set()
for name in section:
if packaging.utils.canonicalize_name(name) in keys:
removals.add(name)
removed = removed or bool(removals)
for key in removals:
del section._data[key]
if removed:
# HACK: The lock file no longer represents the Pipfile at this
# point. Set the hash to an arbitrary invalid value.
self.lockfile.meta.hash = plette.models.Hash({"__invalid__": ""})
def difference_lockfile(self, lockfile):
"""Generate a difference between the current and given lockfiles.
Returns a 2-tuple containing differences in the default and develop
sections.
Each element is a 2-tuple of dicts. The first, `inthis`, contains
entries only present in the current lockfile; the second, `inthat`,
contains entries only present in the given one.
If a key exists in both this and that, but the values differ, the key
is present in both dicts, pointing to values from each file.
"""
diff_data = {
"default": SectionDifference({}, {}),
"develop": SectionDifference({}, {}),
}
for section_name, section_diff in diff_data.items():
try:
this = self.lockfile[section_name]._data
except (KeyError, TypeError):
this = {}
try:
that = lockfile[section_name]._data
except (KeyError, TypeError):
that = {}
for key, this_value in this.items():
try:
that_value = that[key]
except KeyError:
section_diff.inthis[key] = this_value
continue
if not _are_pipfile_entries_equal(this_value, that_value):
section_diff.inthis[key] = this_value
section_diff.inthat[key] = that_value
for key, that_value in that.items():
if key not in this:
section_diff.inthat[key] = that_value
return FileDifference(**diff_data)
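# A minimal sketch of typical Project manipulation, assuming a directory containing a
# Pipfile; the requirement line is illustrative.
#
#     project = Project("/path/to/project")
#     project.add_line_to_pipfile("requests>=2.19", develop=False)
#     project.contains_key_in_pipfile("Requests")   # True, names are canonicalized
#     project._p.write()                            # one way to persist the Pipfile edit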


@@ -0,0 +1,198 @@
# -*- coding=utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import os
import resolvelib
from ..internals.candidates import find_candidates
from ..internals.dependencies import get_dependencies
from ..internals.utils import (
filter_sources, get_allow_prereleases, identify_requirment, strip_extras,
)
PROTECTED_PACKAGE_NAMES = {"pip", "setuptools"}
class BasicProvider(resolvelib.AbstractProvider):
"""Provider implementation to interface with `requirementslib.Requirement`.
"""
def __init__(self, root_requirements, sources,
requires_python, allow_prereleases):
self.sources = sources
self.requires_python = requires_python
self.allow_prereleases = bool(allow_prereleases)
self.invalid_candidates = set()
# Remember requirements of each pinned candidate. The resolver calls
# `get_dependencies()` only when it wants to repin, so the dependencies we
# got the last time it was called for a package are the set actually used
# by the resolver. We use this later to trace how a given
# dependency is specified by a package.
self.fetched_dependencies = {None: {
self.identify(r): r for r in root_requirements
}}
# Should Pipfile's requires.python_[full_]version be included?
self.collected_requires_pythons = {None: ""}
def identify(self, dependency):
return identify_requirment(dependency)
def get_preference(self, resolution, candidates, information):
# TODO: Provide better sorting logic. This simply resolves the ones with
# fewer choices first. Not sophisticated, but sounds reasonable?
return len(candidates)
def find_matches(self, requirement):
sources = filter_sources(requirement, self.sources)
candidates = find_candidates(
requirement, sources, self.requires_python,
get_allow_prereleases(requirement, self.allow_prereleases),
)
return candidates
def is_satisfied_by(self, requirement, candidate):
# A non-named requirement has exactly one candidate, as implemented in
# `find_matches()`. Since pip does not implement URL-based lookup
# (PEP 508) yet, it must match unless there are duplicated entries in
# Pipfile. If there are, the user takes the blame. (sarugaku/passa#34)
if not requirement.is_named:
return True
# A non-named candidate can only come from a non-named requirement,
# which, since pip does not implement URL based lookup (PEP 508) yet,
# can only come from Pipfile. Assume the user knows what they're doing,
# and use it without checking. (sarugaku/passa#34)
if not candidate.is_named:
return True
# Optimization: Everything matches if there are no specifiers.
if not requirement.specifiers:
return True
# We can't handle old version strings before PEP 440. Drop them all.
# Practically this shouldn't be a problem if the user is specifying a
# remotely reasonable dependency not from before 2013.
candidate_line = candidate.as_line(include_hashes=False)
if candidate_line in self.invalid_candidates:
return False
try:
version = candidate.get_specifier().version
except (TypeError, ValueError):
print('ignoring invalid version from {!r}'.format(candidate_line))
self.invalid_candidates.add(candidate_line)
return False
return requirement.as_ireq().specifier.contains(version)
def get_dependencies(self, candidate):
sources = filter_sources(candidate, self.sources)
try:
dependencies, requires_python = get_dependencies(
candidate, sources=sources,
)
except Exception as e:
if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"):
raise
print("failed to get dependencies for {0!r}: {1}".format(
candidate.as_line(include_hashes=False), e,
))
dependencies = []
requires_python = ""
# Exclude protected packages from the list. This prevents those
# packages from being locked, unless the user is actually working on
# them, and explicitly lists them as top-level requirements -- those
# packages are not added via this code path. (sarugaku/passa#15)
dependencies = [
dependency for dependency in dependencies
if dependency.normalized_name not in PROTECTED_PACKAGE_NAMES
]
if candidate.extras:
# HACK: If this candidate has extras, add the original candidate
# (same pinned version, no extras) as its dependency. This ensures
# the same package with different extras (treated as distinct by
# the resolver) have the same version. (sarugaku/passa#4)
dependencies.append(strip_extras(candidate))
candidate_key = self.identify(candidate)
self.fetched_dependencies[candidate_key] = {
self.identify(r): r for r in dependencies
}
self.collected_requires_pythons[candidate_key] = requires_python
return dependencies
class PinReuseProvider(BasicProvider):
"""A provider that reuses preferred pins if possible.
This is used to implement "add", "remove", and "only-if-needed upgrade",
where already-pinned candidates in Pipfile.lock should be preferred.
"""
def __init__(self, preferred_pins, *args, **kwargs):
super(PinReuseProvider, self).__init__(*args, **kwargs)
self.preferred_pins = preferred_pins
def find_matches(self, requirement):
candidates = super(PinReuseProvider, self).find_matches(requirement)
try:
# Add the preferred pin. Remember that the resolver prefers candidates
# at the end of the list, so the most preferred should be last.
candidates.append(self.preferred_pins[self.identify(requirement)])
except KeyError:
pass
return candidates
class EagerUpgradeProvider(PinReuseProvider):
"""A specialized provider to handle an "eager" upgrade strategy.
An eager upgrade tries to upgrade not only packages specified, but also
their dependencies (recursively). This contrasts to the "only-if-needed"
default, which only promises to upgrade the specified package, and
prevents touching anything else if at all possible.
The provider is implemented so as to keep track of all dependencies of the
packages specified for upgrade, and to free their pins when it has a chance.
"""
def __init__(self, tracked_names, *args, **kwargs):
super(EagerUpgradeProvider, self).__init__(*args, **kwargs)
self.tracked_names = set(tracked_names)
for name in tracked_names:
self.preferred_pins.pop(name, None)
# HACK: Set this special flag to distinguish preferred pins from
# regular, to tell the resolver to NOT use them for tracked packages.
for pin in self.preferred_pins.values():
pin._preferred_by_provider = True
def is_satisfied_by(self, requirement, candidate):
# If this is a tracked package, steer the resolver away from the
# preferred pin and into a "normal" candidate selection process.
if (self.identify(requirement) in self.tracked_names and
getattr(candidate, "_preferred_by_provider", False)):
return False
return super(EagerUpgradeProvider, self).is_satisfied_by(
requirement, candidate,
)
def get_dependencies(self, candidate):
# If this package is being tracked for upgrade, remove pins of its
# dependencies, and start tracking these new packages.
dependencies = super(EagerUpgradeProvider, self).get_dependencies(
candidate,
)
if self.identify(candidate) in self.tracked_names:
for dependency in dependencies:
name = self.identify(dependency)
self.tracked_names.add(name)
self.preferred_pins.pop(name, None)
return dependencies
def get_preference(self, resolution, candidates, information):
# Resolve tracked packages first so we have a chance to unpin them.
name = self.identify(candidates[0])
if name in self.tracked_names:
return -1
return len(candidates)
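# A minimal sketch of how these providers plug into ResolveLib, mirroring the lockers
# module above; `requirements` and `sources` are assumed to be prepared elsewhere.
#
#     provider = BasicProvider(requirements, sources, requires_python="", allow_prereleases=False)
#     resolver = resolvelib.Resolver(provider, resolvelib.BaseReporter())
#     state = resolver.resolve(requirements)
#     # state.mapping: identifier -> pinned candidate; state.graph: dependency graph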


@@ -0,0 +1,214 @@
# -*- coding=utf-8 -*-
from __future__ import absolute_import, unicode_literals
import collections
import contextlib
import os
import sys
import sysconfig
import pkg_resources
import packaging.markers
import packaging.version
import requirementslib
from ..internals._pip import uninstall, EditableInstaller, WheelInstaller
def _is_installation_local(name):
"""Check whether the distribution is in the current Python installation.
This is used to distinguish packages seen by a virtual environment. A venv
may be able to see global packages, but we don't want to mess with them.
"""
loc = os.path.normcase(pkg_resources.working_set.by_key[name].location)
pre = os.path.normcase(sys.prefix)
return os.path.commonprefix([loc, pre]) == pre
def _is_up_to_date(distro, version):
# This is done in strings to avoid type mismatches caused by vendoring.
return str(version) == str(packaging.version.parse(distro.version))
GroupCollection = collections.namedtuple("GroupCollection", [
"uptodate", "outdated", "noremove", "unneeded",
])
def _group_installed_names(packages):
"""Group locally installed packages based on given specifications.
`packages` is a name-package mapping that is used as the baseline to
determine how installed packages should be grouped.
Returns a `GroupCollection` of disjoint sets, all containing names of
installed packages:
* `uptodate`: These match the specifications.
* `outdated`: These installations are specified, but don't match the
specifications in `packages`.
* `unneeded`: These are installed, but not specified in `packages`.
"""
groupcoll = GroupCollection(set(), set(), set(), set())
for distro in pkg_resources.working_set:
name = distro.key
try:
package = packages[name]
except KeyError:
groupcoll.unneeded.add(name)
continue
r = requirementslib.Requirement.from_pipfile(name, package)
if not r.is_named:
# Always mark non-named. I think pip does something similar?
groupcoll.outdated.add(name)
elif not _is_up_to_date(distro, r.get_version()):
groupcoll.outdated.add(name)
else:
groupcoll.uptodate.add(name)
return groupcoll
@contextlib.contextmanager
def _remove_package(name):
if name is None or not _is_installation_local(name):
yield None
return
with uninstall(name, auto_confirm=True, verbose=False) as uninstaller:
yield uninstaller
def _get_packages(lockfile, default, develop):
# Don't need to worry about duplicates because only extras can differ.
# Extras don't matter because they only affect dependencies, and we
# don't install dependencies anyway!
packages = {}
if default:
packages.update(lockfile.default._data)
if develop:
packages.update(lockfile.develop._data)
return packages
def _build_paths():
"""Prepare paths for distlib.wheel.Wheel to install into.
"""
paths = sysconfig.get_paths()
return {
"prefix": sys.prefix,
"data": paths["data"],
"scripts": paths["scripts"],
"headers": paths["include"],
"purelib": paths["purelib"],
"platlib": paths["platlib"],
}
PROTECTED_FROM_CLEAN = {"setuptools", "pip", "wheel"}
def _clean(names):
cleaned = set()
for name in names:
if name in PROTECTED_FROM_CLEAN:
continue
with _remove_package(name) as uninst:
if uninst:
cleaned.add(name)
return cleaned
class Synchronizer(object):
"""Helper class to install packages from a project's lock file.
"""
def __init__(self, project, default, develop, clean_unneeded):
self._root = project.root # Only for repr.
self.packages = _get_packages(project.lockfile, default, develop)
self.sources = project.lockfile.meta.sources._data
self.paths = _build_paths()
self.clean_unneeded = clean_unneeded
def __repr__(self):
return "<{0} @ {1!r}>".format(type(self).__name__, self._root)
def sync(self):
groupcoll = _group_installed_names(self.packages)
installed = set()
updated = set()
cleaned = set()
# TODO: Show a prompt to confirm cleaning. We will need to implement a
# reporter pattern for this as well.
if self.clean_unneeded:
names = _clean(groupcoll.unneeded)
cleaned.update(names)
# TODO: Specify installation order? (pypa/pipenv#2274)
installers = []
for name, package in self.packages.items():
r = requirementslib.Requirement.from_pipfile(name, package)
name = r.normalized_name
if name in groupcoll.uptodate:
continue
markers = r.markers
if markers and not packaging.markers.Marker(markers).evaluate():
continue
r.markers = None
if r.editable:
installer = EditableInstaller(r)
else:
installer = WheelInstaller(r, self.sources, self.paths)
try:
installer.prepare()
except Exception as e:
if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"):
raise
print("failed to prepare {0!r}: {1}".format(
r.as_line(include_hashes=False), e,
))
else:
installers.append((name, installer))
for name, installer in installers:
if name in groupcoll.outdated:
name_to_remove = name
else:
name_to_remove = None
try:
with _remove_package(name_to_remove):
installer.install()
except Exception as e:
if os.environ.get("PASSA_NO_SUPPRESS_EXCEPTIONS"):
raise
print("failed to install {0!r}: {1}".format(
r.as_line(include_hashes=False), e,
))
continue
if name in groupcoll.outdated or name in groupcoll.noremove:
updated.add(name)
else:
installed.add(name)
return installed, updated, cleaned
class Cleaner(object):
"""Helper class to clean packages not in a project's lock file.
"""
def __init__(self, project, default, develop):
self._root = project.root # Only for repr.
self.packages = _get_packages(project.lockfile, default, develop)
def __repr__(self):
return "<{0} @ {1!r}>".format(type(self).__name__, self._root)
def clean(self):
groupcoll = _group_installed_names(self.packages)
cleaned = _clean(groupcoll.unneeded)
return cleaned
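# A minimal sketch of synchronizing an environment from a lock file, assuming a passa
# Project whose Pipfile.lock has already been generated.
#
#     syncer = Synchronizer(project, default=True, develop=False, clean_unneeded=True)
#     installed, updated, cleaned = syncer.sync()
#     Cleaner(project, default=True, develop=True).clean()   # remove packages not in the lock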