login page
1  Lib/site-packages/pipenv/vendor/requirementslib/models/__init__.py  vendored  Normal file
@@ -0,0 +1 @@
# -*- coding: utf-8 -*-
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/__init__.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/cache.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/dependencies.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/lockfile.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/markers.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/metadata.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/pipfile.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/project.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/requirements.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/resolvers.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/setup_info.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/url.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/utils.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
BIN  Lib/site-packages/pipenv/vendor/requirementslib/models/__pycache__/vcs.cpython-38.pyc  vendored  Normal file  (Binary file not shown.)
341  Lib/site-packages/pipenv/vendor/requirementslib/models/cache.py  vendored  Normal file
@@ -0,0 +1,341 @@
# -*- coding=utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals

import atexit
import copy
import hashlib
import json
import os
import sys

import vistir

from appdirs import user_cache_dir
from pip_shims.shims import FAVORITE_HASH, SafeFileCache
from packaging.requirements import Requirement

from .utils import as_tuple, key_from_req, lookup_table, get_pinned_version

from ..exceptions import FileExistsError


CACHE_DIR = os.environ.get("PIPENV_CACHE_DIR", user_cache_dir("pipenv"))


# Pip-tools cache implementation
class CorruptCacheError(Exception):
    def __init__(self, path):
        self.path = path

    def __str__(self):
        lines = [
            'The dependency cache seems to have been corrupted.',
            'Inspect, or delete, the following file:',
            '  {}'.format(self.path),
        ]
        return os.linesep.join(lines)


def read_cache_file(cache_file_path):
    with open(cache_file_path, 'r') as cache_file:
        try:
            doc = json.load(cache_file)
        except ValueError:
            raise CorruptCacheError(cache_file_path)

        # Check version and load the contents
        assert doc['__format__'] == 1, 'Unknown cache file format'
        return doc['dependencies']


class DependencyCache(object):
    """
    Creates a new persistent dependency cache for the current Python version.
    The cache file is written to the appropriate user cache dir for the
    current platform, i.e.

        ~/.cache/pip-tools/depcache-pyX.Y.json

    Where X.Y indicates the Python version.
    """
    def __init__(self, cache_dir=None):
        if cache_dir is None:
            cache_dir = CACHE_DIR
        if not vistir.compat.Path(cache_dir).absolute().is_dir():
            try:
                vistir.path.mkdir_p(os.path.abspath(cache_dir))
            except (FileExistsError, OSError):
                pass

        py_version = '.'.join(str(digit) for digit in sys.version_info[:2])
        cache_filename = 'depcache-py{}.json'.format(py_version)

        self._cache_file = os.path.join(cache_dir, cache_filename)
        self._cache = None

    @property
    def cache(self):
        """
        The dictionary that is the actual in-memory cache. This property
        lazily loads the cache from disk.
        """
        if self._cache is None:
            self.read_cache()
        return self._cache

    def as_cache_key(self, ireq):
        """
        Given a requirement, return its cache key. This behavior is a little weird in order to allow backwards
        compatibility with cache files. For a requirement without extras, this will return, for example:

        ("ipython", "2.1.0")

        For a requirement with extras, the extras will be comma-separated and appended to the version, inside brackets,
        like so:

        ("ipython", "2.1.0[nbconvert,notebook]")
        """
        name, version, extras = as_tuple(ireq)
        if not extras:
            extras_string = ""
        else:
            extras_string = "[{}]".format(",".join(extras))
        return name, "{}{}".format(version, extras_string)

    def read_cache(self):
        """Reads the cached contents into memory."""
        if os.path.exists(self._cache_file):
            self._cache = read_cache_file(self._cache_file)
        else:
            self._cache = {}

    def write_cache(self):
        """Writes the cache to disk as JSON."""
        doc = {
            '__format__': 1,
            'dependencies': self._cache,
        }
        with open(self._cache_file, 'w') as f:
            json.dump(doc, f, sort_keys=True)

    def clear(self):
        self._cache = {}
        self.write_cache()

    def __contains__(self, ireq):
        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
        return pkgversion_and_extras in self.cache.get(pkgname, {})

    def __getitem__(self, ireq):
        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
        return self.cache[pkgname][pkgversion_and_extras]

    def __setitem__(self, ireq, values):
        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
        self.cache.setdefault(pkgname, {})
        self.cache[pkgname][pkgversion_and_extras] = values
        self.write_cache()

    def __delitem__(self, ireq):
        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
        try:
            del self.cache[pkgname][pkgversion_and_extras]
        except KeyError:
            return
        self.write_cache()

    def get(self, ireq, default=None):
        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
        return self.cache.get(pkgname, {}).get(pkgversion_and_extras, default)

    def reverse_dependencies(self, ireqs):
        """
        Returns a lookup table of reverse dependencies for all the given ireqs.

        Since this is all static, it only works if the dependency cache
        contains the complete data, otherwise you end up with a partial view.
        This is typically no problem if you use this function after the entire
        dependency tree is resolved.
        """
        ireqs_as_cache_values = [self.as_cache_key(ireq) for ireq in ireqs]
        return self._reverse_dependencies(ireqs_as_cache_values)

    def _reverse_dependencies(self, cache_keys):
        """
        Returns a lookup table of reverse dependencies for all the given cache keys.

        Example input:

            [('pep8', '1.5.7'),
             ('flake8', '2.4.0'),
             ('mccabe', '0.3'),
             ('pyflakes', '0.8.1')]

        Example output:

            {'pep8': ['flake8'],
             'flake8': [],
             'mccabe': ['flake8'],
             'pyflakes': ['flake8']}

        """
        # First, collect all the dependencies into a sequence of (parent, child) tuples, like [('flake8', 'pep8'),
        # ('flake8', 'mccabe'), ...]
        return lookup_table((key_from_req(Requirement(dep_name)), name)
                            for name, version_and_extras in cache_keys
                            for dep_name in self.cache[name][version_and_extras])

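Aside: a minimal, self-contained sketch (not part of the vendored file) of the JSON
document DependencyCache reads and writes. The layout mirrors as_cache_key() and
write_cache() above; the package, version, and dependency names are made up.

    import json

    doc = {
        "__format__": 1,  # the format marker read_cache_file() asserts on
        "dependencies": {
            # "<version>[<extras>]" keys, as produced by as_cache_key()
            "ipython": {"2.1.0[nbconvert,notebook]": ["decorator", "pygments"]},
        },
    }
    print(json.dumps(doc, sort_keys=True))
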
class HashCache(SafeFileCache):
    """Caches hashes of PyPI artifacts so we do not need to re-download them.

    Hashes are only cached when the URL appears to contain a hash in it and the
    cache key includes the hash value returned from the server. This ought to
    avoid issues where the location on the server changes.
    """
    def __init__(self, *args, **kwargs):
        session = kwargs.pop("session", None)
        if not session:
            import requests
            session = requests.session()
            atexit.register(session.close)
        cache_dir = kwargs.pop('cache_dir', CACHE_DIR)
        self.session = session
        kwargs.setdefault('directory', os.path.join(cache_dir, 'hash-cache'))
        super(HashCache, self).__init__(*args, **kwargs)

    def get_hash(self, location):
        from pip_shims import VcsSupport
        # if there is no location hash (i.e., md5 / sha256 / etc) we don't want to store it
        hash_value = None
        vcs = VcsSupport()
        orig_scheme = location.scheme
        new_location = copy.deepcopy(location)
        if orig_scheme in vcs.all_schemes:
            new_location.url = new_location.url.split("+", 1)[-1]
        can_hash = new_location.hash
        if can_hash:
            # hash url WITH fragment
            hash_value = self._get_file_hash(new_location.url) if not new_location.url.startswith("ssh") else None
        if not hash_value:
            hash_value = self._get_file_hash(new_location)
        hash_value = hash_value.encode('utf8')
        if can_hash:
            self.set(new_location.url, hash_value)
        return hash_value.decode('utf8')

    def _get_file_hash(self, location):
        h = hashlib.new(FAVORITE_HASH)
        with vistir.contextmanagers.open_file(location, self.session) as fp:
            for chunk in iter(lambda: fp.read(8096), b""):
                h.update(chunk)
        return ":".join([FAVORITE_HASH, h.hexdigest()])

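Aside: the "algo:hexdigest" string shape that _get_file_hash() returns, reproduced
(not part of the vendored file) with plain hashlib over an in-memory stream;
"sha256" stands in for pip's FAVORITE_HASH.

    import hashlib
    import io

    def sketch_file_hash(fp, algo="sha256", chunk_size=8096):
        h = hashlib.new(algo)  # chunked reads, exactly as _get_file_hash() does
        for chunk in iter(lambda: fp.read(chunk_size), b""):
            h.update(chunk)
        return ":".join([algo, h.hexdigest()])

    print(sketch_file_hash(io.BytesIO(b"example artifact bytes")))
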
class _JSONCache(object):
    """A persistent cache backed by a JSON file.

    The cache file is written to the appropriate user cache dir for the
    current platform, i.e.

        ~/.cache/pip-tools/depcache-pyX.Y.json

    Where X.Y indicates the Python version.
    """

    filename_format = None

    def __init__(self, cache_dir=CACHE_DIR):
        vistir.mkdir_p(cache_dir)
        python_version = ".".join(str(digit) for digit in sys.version_info[:2])
        cache_filename = self.filename_format.format(
            python_version=python_version,
        )
        self._cache_file = os.path.join(cache_dir, cache_filename)
        self._cache = None

    @property
    def cache(self):
        """The dictionary that is the actual in-memory cache.

        This property lazily loads the cache from disk.
        """
        if self._cache is None:
            self.read_cache()
        return self._cache

    def as_cache_key(self, ireq):
        """Given a requirement, return its cache key.

        This behavior is a little weird in order to allow backwards
        compatibility with cache files. For a requirement without extras, this
        will return, for example::

            ("ipython", "2.1.0")

        For a requirement with extras, the extras will be comma-separated and
        appended to the version, inside brackets, like so::

            ("ipython", "2.1.0[nbconvert,notebook]")
        """
        extras = tuple(sorted(ireq.extras))
        if not extras:
            extras_string = ""
        else:
            extras_string = "[{}]".format(",".join(extras))
        name = key_from_req(ireq.req)
        version = get_pinned_version(ireq)
        return name, "{}{}".format(version, extras_string)

    def read_cache(self):
        """Reads the cached contents into memory.
        """
        if os.path.exists(self._cache_file):
            self._cache = read_cache_file(self._cache_file)
        else:
            self._cache = {}

    def write_cache(self):
        """Writes the cache to disk as JSON.
        """
        doc = {
            '__format__': 1,
            'dependencies': self._cache,
        }
        with open(self._cache_file, 'w') as f:
            json.dump(doc, f, sort_keys=True)

    def clear(self):
        self._cache = {}
        self.write_cache()

    def __contains__(self, ireq):
        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
        return pkgversion_and_extras in self.cache.get(pkgname, {})

    def __getitem__(self, ireq):
        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
        return self.cache[pkgname][pkgversion_and_extras]

    def __setitem__(self, ireq, values):
        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
        self.cache.setdefault(pkgname, {})
        self.cache[pkgname][pkgversion_and_extras] = values
        self.write_cache()

    def __delitem__(self, ireq):
        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
        try:
            del self.cache[pkgname][pkgversion_and_extras]
        except KeyError:
            return
        self.write_cache()

    def get(self, ireq, default=None):
        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
        return self.cache.get(pkgname, {}).get(pkgversion_and_extras, default)


class RequiresPythonCache(_JSONCache):
    """Cache a candidate's Requires-Python information.
    """

    filename_format = "pyreqcache-py{python_version}.json"
663  Lib/site-packages/pipenv/vendor/requirementslib/models/dependencies.py  vendored  Normal file
@@ -0,0 +1,663 @@
# -*- coding=utf-8 -*-

import atexit
import contextlib
import copy
import functools
import os

from pipenv.vendor import attr
import packaging.markers
import packaging.version
import pip_shims.shims
import requests
from packaging.utils import canonicalize_name
from vistir.compat import JSONDecodeError, fs_str
from vistir.contextmanagers import cd, temp_environ
from vistir.path import create_tracked_tempdir

from ..environment import MYPY_RUNNING
from ..utils import _ensure_dir, prepare_pip_source_args
from .cache import CACHE_DIR, DependencyCache
from .setup_info import SetupInfo
from .utils import (
    clean_requires_python,
    fix_requires_python_marker,
    format_requirement,
    full_groupby,
    is_pinned_requirement,
    key_from_ireq,
    make_install_requirement,
    name_from_req,
    version_from_ireq,
)

try:
    from contextlib import ExitStack
except ImportError:
    from contextlib2 import ExitStack

if MYPY_RUNNING:
    from typing import (
        Any,
        Dict,
        List,
        Generator,
        Optional,
        Union,
        Tuple,
        TypeVar,
        Text,
        Set,
    )
    from pip_shims.shims import (
        InstallRequirement,
        InstallationCandidate,
        PackageFinder,
        Command,
    )
    from packaging.requirements import Requirement as PackagingRequirement
    from packaging.markers import Marker

    TRequirement = TypeVar("TRequirement")
    RequirementType = TypeVar(
        "RequirementType", covariant=True, bound=PackagingRequirement
    )
    MarkerType = TypeVar("MarkerType", covariant=True, bound=Marker)
    STRING_TYPE = Union[str, bytes, Text]
    S = TypeVar("S", bytes, str, Text)


PKGS_DOWNLOAD_DIR = fs_str(os.path.join(CACHE_DIR, "pkgs"))
WHEEL_DOWNLOAD_DIR = fs_str(os.path.join(CACHE_DIR, "wheels"))

DEPENDENCY_CACHE = DependencyCache()


@contextlib.contextmanager
def _get_wheel_cache():
    with pip_shims.shims.global_tempdir_manager():
        yield pip_shims.shims.WheelCache(
            CACHE_DIR, pip_shims.shims.FormatControl(set(), set())
        )


def _get_filtered_versions(ireq, versions, prereleases):
    return set(ireq.specifier.filter(versions, prereleases=prereleases))


def find_all_matches(finder, ireq, pre=False):
    # type: (PackageFinder, InstallRequirement, bool) -> List[InstallationCandidate]
    """Find all matching dependencies using the supplied finder and the
    given ireq.

    :param finder: A package finder for discovering matching candidates.
    :type finder: :class:`~pip._internal.index.PackageFinder`
    :param ireq: An install requirement.
    :type ireq: :class:`~pip._internal.req.req_install.InstallRequirement`
    :return: A list of matching candidates.
    :rtype: list[:class:`~pip._internal.index.InstallationCandidate`]
    """

    candidates = clean_requires_python(finder.find_all_candidates(ireq.name))
    versions = {candidate.version for candidate in candidates}
    allowed_versions = _get_filtered_versions(ireq, versions, pre)
    if not pre and not allowed_versions:
        allowed_versions = _get_filtered_versions(ireq, versions, True)
    candidates = {c for c in candidates if c.version in allowed_versions}
    return candidates

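Aside: the version-filtering step of find_all_matches(), shown with packaging alone
(not part of the vendored file). The prerelease retry mirrors the
"if not pre and not allowed_versions" fallback above.

    from packaging.specifiers import SpecifierSet

    def filtered_versions(spec, versions, pre=False):
        specset = SpecifierSet(spec)
        allowed = set(specset.filter(versions, prereleases=pre))
        if not pre and not allowed:  # nothing stable matched; retry with prereleases
            allowed = set(specset.filter(versions, prereleases=True))
        return allowed

    print(filtered_versions(">=2.0,<3.0", ["1.9", "2.1.0", "2.5rc1", "3.0"]))
    # -> {'2.1.0'}; the 2.5rc1 prerelease is admitted only if nothing stable fits
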
def get_pip_command():
    # type: () -> Command
    # Use pip's parser for pip.conf management and defaults.
    # General options (find_links, index_url, extra_index_url, trusted_host,
    # and pre) are deferred to pip.
    pip_command = pip_shims.shims.InstallCommand()
    return pip_command


@attr.s
class AbstractDependency(object):
    name = attr.ib()  # type: STRING_TYPE
    specifiers = attr.ib()
    markers = attr.ib()
    candidates = attr.ib()
    requirement = attr.ib()
    parent = attr.ib()
    finder = attr.ib()
    dep_dict = attr.ib(default=attr.Factory(dict))

    @property
    def version_set(self):
        """Return the set of versions for the candidates in this abstract dependency.

        :return: A set of matching versions
        :rtype: set(str)
        """

        if len(self.candidates) == 1:
            return set()
        return set(packaging.version.parse(version_from_ireq(c)) for c in self.candidates)

    def compatible_versions(self, other):
        """Find compatible version numbers between this abstract
        dependency and another one.

        :param other: An abstract dependency to compare with.
        :type other: :class:`~requirementslib.models.dependency.AbstractDependency`
        :return: A set of compatible version strings
        :rtype: set(str)
        """

        if len(self.candidates) == 1 and next(iter(self.candidates)).editable:
            return self
        elif len(other.candidates) == 1 and next(iter(other.candidates)).editable:
            return other
        return self.version_set & other.version_set

    def compatible_abstract_dep(self, other):
        """Merge this abstract dependency with another one.

        Return the result of the merge as a new abstract dependency.

        :param other: An abstract dependency to merge with
        :type other: :class:`~requirementslib.models.dependency.AbstractDependency`
        :return: A new, combined abstract dependency
        :rtype: :class:`~requirementslib.models.dependency.AbstractDependency`
        """

        from .requirements import Requirement

        if len(self.candidates) == 1 and next(iter(self.candidates)).editable:
            return self
        elif len(other.candidates) == 1 and next(iter(other.candidates)).editable:
            return other
        new_specifiers = self.specifiers & other.specifiers
        markers = set(self.markers) if self.markers else set()
        if other.markers:
            markers.add(other.markers)
        new_markers = None
        if markers:
            new_markers = packaging.markers.Marker(
                " or ".join(str(m) for m in sorted(markers))
            )
        new_ireq = copy.deepcopy(self.requirement.ireq)
        new_ireq.req.specifier = new_specifiers
        new_ireq.req.marker = new_markers
        new_requirement = Requirement.from_line(format_requirement(new_ireq))
        compatible_versions = self.compatible_versions(other)
        if isinstance(compatible_versions, AbstractDependency):
            return compatible_versions
        candidates = [
            c
            for c in self.candidates
            if packaging.version.parse(version_from_ireq(c)) in compatible_versions
        ]
        dep_dict = {}
        candidate_strings = [format_requirement(c) for c in candidates]
        for c in candidate_strings:
            if c in self.dep_dict:
                dep_dict[c] = self.dep_dict.get(c)
        return AbstractDependency(
            name=self.name,
            specifiers=new_specifiers,
            markers=new_markers,
            candidates=candidates,
            requirement=new_requirement,
            parent=self.parent,
            dep_dict=dep_dict,
            finder=self.finder,
        )

    def get_deps(self, candidate):
        """Get the dependencies of the supplied candidate.

        :param candidate: An install requirement
        :type candidate: :class:`~pip._internal.req.req_install.InstallRequirement`
        :return: A list of abstract dependencies
        :rtype: list[:class:`~requirementslib.models.dependency.AbstractDependency`]
        """

        key = format_requirement(candidate)
        if key not in self.dep_dict:
            from .requirements import Requirement

            req = Requirement.from_line(key)
            req = req.merge_markers(self.markers)
            self.dep_dict[key] = req.get_abstract_dependencies()
        return self.dep_dict[key]

    @classmethod
    def from_requirement(cls, requirement, parent=None):
        """Creates a new :class:`~requirementslib.models.dependency.AbstractDependency`
        from a :class:`~requirementslib.models.requirements.Requirement` object.

        This class is used to find all candidates matching a given set of specifiers
        and a given requirement.

        :param requirement: A requirement for resolution
        :type requirement: :class:`~requirementslib.models.requirements.Requirement` object.
        """
        name = requirement.normalized_name
        specifiers = requirement.ireq.specifier if not requirement.editable else ""
        markers = requirement.ireq.markers
        extras = requirement.ireq.extras
        is_pinned = is_pinned_requirement(requirement.ireq)
        is_constraint = bool(parent)
        _, finder = get_finder(sources=None)
        candidates = []
        if not is_pinned and not requirement.editable:
            for r in requirement.find_all_matches(finder=finder):
                req = make_install_requirement(
                    name,
                    r.version,
                    extras=extras,
                    markers=markers,
                    constraint=is_constraint,
                )
                req.req.link = getattr(r, "location", getattr(r, "link", None))
                req.parent = parent
                candidates.append(req)
            candidates = sorted(
                set(candidates),
                key=lambda k: packaging.version.parse(version_from_ireq(k)),
            )
        else:
            candidates = [requirement.ireq]
        return cls(
            name=name,
            specifiers=specifiers,
            markers=markers,
            candidates=candidates,
            requirement=requirement,
            parent=parent,
            finder=finder,
        )

    @classmethod
    def from_string(cls, line, parent=None):
        from .requirements import Requirement

        req = Requirement.from_line(line)
        abstract_dep = cls.from_requirement(req, parent=parent)
        return abstract_dep


def get_abstract_dependencies(reqs, sources=None, parent=None):
    """Get all abstract dependencies for a given list of requirements.

    Given a set of requirements, convert each requirement to an Abstract Dependency.

    :param reqs: A list of Requirements
    :type reqs: list[:class:`~requirementslib.models.requirements.Requirement`]
    :param sources: Pipfile-formatted sources, defaults to None
    :type sources: list[dict], optional
    :param parent: The parent of this list of dependencies, defaults to None
    :type parent: :class:`~requirementslib.models.requirements.Requirement`, optional
    :return: A list of Abstract Dependencies
    :rtype: list[:class:`~requirementslib.models.dependency.AbstractDependency`]
    """

    deps = []
    from .requirements import Requirement

    for req in reqs:
        if isinstance(req, pip_shims.shims.InstallRequirement):
            requirement = Requirement.from_line("{0}{1}".format(req.name, req.specifier))
            if req.link:
                requirement.req.link = req.link
            requirement.markers = req.markers
            requirement.req.markers = req.markers
            requirement.extras = req.extras
            requirement.req.extras = req.extras
        elif isinstance(req, Requirement):
            requirement = copy.deepcopy(req)
        else:
            requirement = Requirement.from_line(req)
        dep = AbstractDependency.from_requirement(requirement, parent=parent)
        deps.append(dep)
    return deps


def get_dependencies(ireq, sources=None, parent=None):
    # type: (Union[InstallRequirement, InstallationCandidate], Optional[List[Dict[S, Union[S, bool]]]], Optional[AbstractDependency]) -> Set[S, ...]
    """Get all dependencies for a given install requirement.

    :param ireq: A single InstallRequirement
    :type ireq: :class:`~pip._internal.req.req_install.InstallRequirement`
    :param sources: Pipfile-formatted sources, defaults to None
    :type sources: list[dict], optional
    :param parent: The parent of this list of dependencies, defaults to None
    :type parent: :class:`~pip._internal.req.req_install.InstallRequirement`
    :return: A set of dependency lines for generating new InstallRequirements.
    :rtype: set(str)
    """
    if not isinstance(ireq, pip_shims.shims.InstallRequirement):
        name = getattr(ireq, "project_name", getattr(ireq, "project", ireq.name))
        version = getattr(ireq, "version", None)
        if not version:
            ireq = pip_shims.shims.InstallRequirement.from_line("{0}".format(name))
        else:
            ireq = pip_shims.shims.InstallRequirement.from_line(
                "{0}=={1}".format(name, version)
            )
    pip_options = get_pip_options(sources=sources)
    getters = [
        get_dependencies_from_cache,
        get_dependencies_from_wheel_cache,
        get_dependencies_from_json,
        functools.partial(get_dependencies_from_index, pip_options=pip_options),
    ]
    for getter in getters:
        deps = getter(ireq)
        if deps is not None:
            return deps
    raise RuntimeError("failed to get dependencies for {}".format(ireq))

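Aside: the first-hit-wins getter chain that get_dependencies() walks, sketched with
stand-in lookups (not part of the vendored file) in place of the real
cache/wheel/JSON/index getters.

    def from_cache(name):
        return None  # simulate a cache miss

    def from_json_api(name):
        return {"requests": {"urllib3", "certifi", "idna"}}.get(name)

    def resolve_deps(name, getters=(from_cache, from_json_api)):
        for getter in getters:  # cheapest source first, full resolution last
            deps = getter(name)
            if deps is not None:
                return deps
        raise RuntimeError("failed to get dependencies for {}".format(name))

    print(resolve_deps("requests"))  # -> {'urllib3', 'certifi', 'idna'}
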
def get_dependencies_from_wheel_cache(ireq):
    # type: (pip_shims.shims.InstallRequirement) -> Optional[Set[pip_shims.shims.InstallRequirement]]
    """Retrieves dependencies for the given install requirement from the wheel cache.

    :param ireq: A single InstallRequirement
    :type ireq: :class:`~pip._internal.req.req_install.InstallRequirement`
    :return: A set of dependency lines for generating new InstallRequirements.
    :rtype: set(str) or None
    """

    if ireq.editable or not is_pinned_requirement(ireq):
        return
    with _get_wheel_cache() as wheel_cache:
        matches = wheel_cache.get(ireq.link, name_from_req(ireq.req))
        if matches:
            matches = set(matches)
            if not DEPENDENCY_CACHE.get(ireq):
                DEPENDENCY_CACHE[ireq] = [format_requirement(m) for m in matches]
            return matches
        return None


def _marker_contains_extra(ireq):
    # TODO: Implement better parsing logic to avoid false-positives.
    return "extra" in repr(ireq.markers)


def get_dependencies_from_json(ireq):
    """Retrieves dependencies for the given install requirement from the JSON API.

    :param ireq: A single InstallRequirement
    :type ireq: :class:`~pip._internal.req.req_install.InstallRequirement`
    :return: A set of dependency lines for generating new InstallRequirements.
    :rtype: set(str) or None
    """

    if ireq.editable or not is_pinned_requirement(ireq):
        return

    # It is technically possible to parse extras out of the JSON API's
    # requirement format, but it is such a chore that we just use the simple API.
    if ireq.extras:
        return

    session = requests.session()
    atexit.register(session.close)
    version = str(ireq.req.specifier).lstrip("=")

    def gen(ireq):
        info = None
        try:
            info = session.get(
                "https://pypi.org/pypi/{0}/{1}/json".format(ireq.req.name, version)
            ).json()["info"]
        finally:
            session.close()
        requires_dist = info.get("requires_dist", info.get("requires"))
        if not requires_dist:  # The API can return None for this.
            return
        for requires in requires_dist:
            i = pip_shims.shims.InstallRequirement.from_line(requires)
            # See above, we don't handle requirements with extras.
            if not _marker_contains_extra(i):
                yield format_requirement(i)

    if ireq not in DEPENDENCY_CACHE:
        try:
            reqs = DEPENDENCY_CACHE[ireq] = list(gen(ireq))
        except JSONDecodeError:
            return
        req_iter = iter(reqs)
    else:
        req_iter = gen(ireq)
    return set(req_iter)


def get_dependencies_from_cache(ireq):
    """Retrieves dependencies for the given install requirement from the dependency cache.

    :param ireq: A single InstallRequirement
    :type ireq: :class:`~pip._internal.req.req_install.InstallRequirement`
    :return: A set of dependency lines for generating new InstallRequirements.
    :rtype: set(str) or None
    """
    if ireq.editable or not is_pinned_requirement(ireq):
        return
    if ireq not in DEPENDENCY_CACHE:
        return
    cached = set(DEPENDENCY_CACHE[ireq])

    # Preserving sanity: run through the cache and make sure every entry is
    # valid. If this fails, something is wrong with the cache. Drop it.
    try:
        broken = False
        for line in cached:
            dep_ireq = pip_shims.shims.InstallRequirement.from_line(line)
            name = canonicalize_name(dep_ireq.name)
            if _marker_contains_extra(dep_ireq):
                broken = True  # The "extra =" marker breaks everything.
            elif name == canonicalize_name(ireq.name):
                broken = True  # A package cannot depend on itself.
            if broken:
                break
    except Exception:
        broken = True

    if broken:
        del DEPENDENCY_CACHE[ireq]
        return

    return cached


def is_python(section):
    return section.startswith("[") and ":" in section


def get_dependencies_from_index(dep, sources=None, pip_options=None, wheel_cache=None):
    """Retrieves dependencies for the given install requirement from the pip resolver.

    :param dep: A single InstallRequirement
    :type dep: :class:`~pip._internal.req.req_install.InstallRequirement`
    :param sources: Pipfile-formatted sources, defaults to None
    :type sources: list[dict], optional
    :return: A set of dependency lines for generating new InstallRequirements.
    :rtype: set(str) or None
    """

    session, finder = get_finder(sources=sources, pip_options=pip_options)
    dep.is_direct = True
    requirements = None
    setup_requires = {}
    with temp_environ(), ExitStack() as stack:
        if not wheel_cache:
            wheel_cache = stack.enter_context(_get_wheel_cache())
        os.environ["PIP_EXISTS_ACTION"] = "i"
        if dep.editable and not dep.prepared and not dep.req:
            setup_info = SetupInfo.from_ireq(dep)
            results = setup_info.get_info()
            setup_requires.update(results["setup_requires"])
            requirements = set(results["requires"].values())
        else:
            results = pip_shims.shims.resolve(dep)
            requirements = [v for v in results.values() if v.name != dep.name]
        requirements = set([format_requirement(r) for r in requirements])
    if not dep.editable and is_pinned_requirement(dep) and requirements is not None:
        DEPENDENCY_CACHE[dep] = list(requirements)
    return requirements


def get_pip_options(args=[], sources=None, pip_command=None):
    """Build a pip command from a list of sources.

    :param args: positional arguments passed through to the pip parser
    :param sources: A list of pipfile-formatted sources, defaults to None
    :type sources: list[dict], optional
    :param pip_command: A pre-built pip command instance
    :type pip_command: :class:`~pip._internal.cli.base_command.Command`
    :return: An instance of pip_options using the supplied arguments plus sane defaults
    :rtype: :class:`~pip._internal.cli.cmdoptions`
    """

    if not pip_command:
        pip_command = get_pip_command()
    if not sources:
        sources = [{"url": "https://pypi.org/simple", "name": "pypi", "verify_ssl": True}]
    _ensure_dir(CACHE_DIR)
    pip_args = args
    pip_args = prepare_pip_source_args(sources, pip_args)
    pip_options, _ = pip_command.parser.parse_args(pip_args)
    pip_options.cache_dir = CACHE_DIR
    return pip_options


def get_finder(sources=None, pip_command=None, pip_options=None):
    # type: (List[Dict[S, Union[S, bool]]], Optional[Command], Any) -> PackageFinder
    """Get a package finder for looking up candidates to install.

    :param sources: A list of pipfile-formatted sources, defaults to None
    :type sources: list[dict], optional
    :param pip_command: A pip command instance, defaults to None
    :type pip_command: :class:`~pip._internal.cli.base_command.Command`
    :param pip_options: Parsed pip options, defaults to None
    :type pip_options: :class:`~pip._internal.cli.cmdoptions`
    :return: A package finder
    :rtype: :class:`~pip._internal.index.PackageFinder`
    """

    if not pip_command:
        pip_command = pip_shims.shims.InstallCommand()
    if not sources:
        sources = [{"url": "https://pypi.org/simple", "name": "pypi", "verify_ssl": True}]
    if not pip_options:
        pip_options = get_pip_options(sources=sources, pip_command=pip_command)
    session = pip_command._build_session(pip_options)
    atexit.register(session.close)
    finder = pip_shims.shims.get_package_finder(
        pip_shims.shims.InstallCommand(), options=pip_options, session=session
    )
    return session, finder


@contextlib.contextmanager
def start_resolver(finder=None, session=None, wheel_cache=None):
    """Context manager to produce a resolver.

    :param finder: A package finder to use for searching the index
    :type finder: :class:`~pip._internal.index.PackageFinder`
    :param :class:`~requests.Session` session: A session instance
    :param :class:`~pip._internal.cache.WheelCache` wheel_cache: A pip WheelCache instance
    :return: A 3-tuple of finder, preparer, resolver
    :rtype: (:class:`~pip._internal.operations.prepare.RequirementPreparer`, :class:`~pip._internal.resolve.Resolver`)
    """

    pip_command = get_pip_command()
    pip_options = get_pip_options(pip_command=pip_command)
    session = None
    if not finder:
        session, finder = get_finder(pip_command=pip_command, pip_options=pip_options)
    if not session:
        session = pip_command._build_session(pip_options)

    download_dir = PKGS_DOWNLOAD_DIR
    _ensure_dir(download_dir)

    _build_dir = create_tracked_tempdir(fs_str("build"))
    _source_dir = create_tracked_tempdir(fs_str("source"))
    try:
        with ExitStack() as ctx:
            ctx.enter_context(pip_shims.shims.global_tempdir_manager())
            if not wheel_cache:
                wheel_cache = ctx.enter_context(_get_wheel_cache())
            _ensure_dir(fs_str(os.path.join(wheel_cache.cache_dir, "wheels")))
            preparer = ctx.enter_context(
                pip_shims.shims.make_preparer(
                    options=pip_options,
                    finder=finder,
                    session=session,
                    build_dir=_build_dir,
                    src_dir=_source_dir,
                    download_dir=download_dir,
                    wheel_download_dir=WHEEL_DOWNLOAD_DIR,
                    progress_bar="off",
                    build_isolation=False,
                    install_cmd=pip_command,
                )
            )
            resolver = pip_shims.shims.get_resolver(
                finder=finder,
                ignore_dependencies=False,
                ignore_requires_python=True,
                preparer=preparer,
                session=session,
                options=pip_options,
                install_cmd=pip_command,
                wheel_cache=wheel_cache,
                force_reinstall=True,
                ignore_installed=True,
                upgrade_strategy="to-satisfy-only",
                isolated=False,
                use_user_site=False,
            )
            yield resolver
    finally:
        session.close()


def get_grouped_dependencies(constraints):
    # We need to track what contributed a specifierset
    # as well as which specifiers were required by the root node
    # in order to resolve any conflicts when we are deciding which thing to backtrack on
    # then we take the loose match (which _is_ flexible) and start moving backwards in
    # versions by popping them off of a stack and checking for the conflicting package
    for _, ireqs in full_groupby(constraints, key=key_from_ireq):
        ireqs = sorted(ireqs, key=lambda ireq: ireq.editable)
        editable_ireq = next(iter(ireq for ireq in ireqs if ireq.editable), None)
        if editable_ireq:
            yield editable_ireq  # only the editable match matters, ignore all others
            continue
        ireqs = iter(ireqs)
        # deepcopy the accumulator so as to not modify the self.our_constraints invariant
        combined_ireq = copy.deepcopy(next(ireqs))
        for ireq in ireqs:
            # NOTE we may be losing some info on dropped reqs here
            try:
                combined_ireq.req.specifier &= ireq.req.specifier
            except TypeError:
                if ireq.req.specifier._specs and not combined_ireq.req.specifier._specs:
                    combined_ireq.req.specifier._specs = ireq.req.specifier._specs
            combined_ireq.constraint &= ireq.constraint
            if not combined_ireq.markers:
                combined_ireq.markers = ireq.markers
            else:
                _markers = combined_ireq.markers._markers
                if not isinstance(_markers[0], (tuple, list)):
                    combined_ireq.markers._markers = [
                        _markers,
                        "and",
                        ireq.markers._markers,
                    ]
            # Return a sorted, de-duped tuple of extras
            combined_ireq.extras = tuple(
                sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras)))
            )
        yield combined_ireq
309  Lib/site-packages/pipenv/vendor/requirementslib/models/lockfile.py  vendored  Normal file
@@ -0,0 +1,309 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function

import copy
import itertools
import os

from pipenv.vendor import attr
import plette.lockfiles
import six
from vistir.compat import FileNotFoundError, JSONDecodeError, Path

from ..exceptions import LockfileCorruptException, MissingParameter, PipfileNotFound
from ..utils import is_editable, is_vcs, merge_items
from .project import ProjectFile
from .requirements import Requirement
from .utils import optional_instance_of

DEFAULT_NEWLINES = six.text_type("\n")


def preferred_newlines(f):
    if isinstance(f.newlines, six.text_type):
        return f.newlines
    return DEFAULT_NEWLINES


is_lockfile = optional_instance_of(plette.lockfiles.Lockfile)
is_projectfile = optional_instance_of(ProjectFile)


@attr.s(slots=True)
class Lockfile(object):
    path = attr.ib(validator=optional_instance_of(Path), type=Path)
    _requirements = attr.ib(default=attr.Factory(list), type=list)
    _dev_requirements = attr.ib(default=attr.Factory(list), type=list)
    projectfile = attr.ib(validator=is_projectfile, type=ProjectFile)
    _lockfile = attr.ib(validator=is_lockfile, type=plette.lockfiles.Lockfile)
    newlines = attr.ib(default=DEFAULT_NEWLINES, type=six.text_type)

    @path.default
    def _get_path(self):
        return Path(os.curdir).joinpath("Pipfile.lock").absolute()

    @projectfile.default
    def _get_projectfile(self):
        return self.load_projectfile(self.path)

    @_lockfile.default
    def _get_lockfile(self):
        return self.projectfile.model

    @property
    def lockfile(self):
        return self._lockfile

    @property
    def section_keys(self):
        return ["default", "develop"]

    @property
    def extended_keys(self):
        return [k for k in itertools.product(self.section_keys, ["", "vcs", "editable"])]

    def get(self, k):
        return self.__getitem__(k)

    def __contains__(self, k):
        check_lockfile = k in self.extended_keys or self.lockfile.__contains__(k)
        if check_lockfile:
            return True
        return super(Lockfile, self).__contains__(k)

    def __setitem__(self, k, v):
        lockfile = self._lockfile
        lockfile.__setitem__(k, v)

    def __getitem__(self, k, *args, **kwargs):
        retval = None
        lockfile = self._lockfile
        section = None
        pkg_type = None
        try:
            retval = lockfile[k]
        except KeyError:
            if "-" in k:
                section, _, pkg_type = k.rpartition("-")
                vals = getattr(lockfile.get(section, {}), "_data", {})
                if pkg_type == "vcs":
                    retval = {k: v for k, v in vals.items() if is_vcs(v)}
                elif pkg_type == "editable":
                    retval = {k: v for k, v in vals.items() if is_editable(v)}
            if retval is None:
                raise
        else:
            retval = getattr(retval, "_data", retval)
        return retval

    def __getattr__(self, k, *args, **kwargs):
        retval = None
        lockfile = super(Lockfile, self).__getattribute__("_lockfile")
        try:
            return super(Lockfile, self).__getattribute__(k)
        except AttributeError:
            retval = getattr(lockfile, k, None)
        if retval is not None:
            return retval
        return super(Lockfile, self).__getattribute__(k, *args, **kwargs)

    def get_deps(self, dev=False, only=True):
        deps = {}
        if dev:
            deps.update(self.develop._data)
            if only:
                return deps
        deps = merge_items([deps, self.default._data])
        return deps

    @classmethod
    def read_projectfile(cls, path):
        """Read the specified project file and provide an interface for writing/updating.

        :param str path: Path to the target file.
        :return: A project file with the model and location for interaction
        :rtype: :class:`~requirementslib.models.project.ProjectFile`
        """

        pf = ProjectFile.read(path, plette.lockfiles.Lockfile, invalid_ok=True)
        return pf

    @classmethod
    def lockfile_from_pipfile(cls, pipfile_path):
        from .pipfile import Pipfile

        if os.path.isfile(pipfile_path):
            if not os.path.isabs(pipfile_path):
                pipfile_path = os.path.abspath(pipfile_path)
            pipfile = Pipfile.load(os.path.dirname(pipfile_path))
            return plette.lockfiles.Lockfile.with_meta_from(pipfile._pipfile)
        raise PipfileNotFound(pipfile_path)

    @classmethod
    def load_projectfile(cls, path, create=True, data=None):
        """Given a path, load or create the necessary lockfile.

        :param str path: Path to the project root or lockfile
        :param bool create: Whether to create the lockfile if not found, defaults to True
        :raises OSError: Thrown if the project root directory doesn't exist
        :raises FileNotFoundError: Thrown if the lockfile doesn't exist and ``create=False``
        :return: A project file instance for the supplied project
        :rtype: :class:`~requirementslib.models.project.ProjectFile`
        """

        if not path:
            path = os.curdir
        path = Path(path).absolute()
        project_path = path if path.is_dir() else path.parent
        lockfile_path = path if path.is_file() else project_path / "Pipfile.lock"
        if not project_path.exists():
            raise OSError("Project does not exist: %s" % project_path.as_posix())
        elif not lockfile_path.exists() and not create:
            raise FileNotFoundError(
                "Lockfile does not exist: %s" % lockfile_path.as_posix()
            )
        projectfile = cls.read_projectfile(lockfile_path.as_posix())
        if not lockfile_path.exists():
            if not data:
                path_str = lockfile_path.as_posix()
                if path_str[-5:] == ".lock":
                    pipfile = Path(path_str[:-5])
                else:
                    pipfile = project_path.joinpath("Pipfile")
                lf = cls.lockfile_from_pipfile(pipfile)
            else:
                lf = plette.lockfiles.Lockfile(data)
            projectfile.model = lf
        return projectfile

    @classmethod
    def from_data(cls, path, data, meta_from_project=True):
        """Create a new lockfile instance from a dictionary.

        :param str path: Path to the project root.
        :param dict data: Data to load into the lockfile.
        :param bool meta_from_project: Attempt to populate the meta section from the
            project root, default True.
        """

        if path is None:
            raise MissingParameter("path")
        if data is None:
            raise MissingParameter("data")
        if not isinstance(data, dict):
            raise TypeError("Expecting a dictionary for parameter 'data'")
        path = os.path.abspath(str(path))
        if os.path.isdir(path):
            project_path = path
        elif not os.path.isdir(path) and os.path.isdir(os.path.dirname(path)):
            project_path = os.path.dirname(path)
        pipfile_path = os.path.join(project_path, "Pipfile")
        lockfile_path = os.path.join(project_path, "Pipfile.lock")
        if meta_from_project:
            lockfile = cls.lockfile_from_pipfile(pipfile_path)
            lockfile.update(data)
        else:
            lockfile = plette.lockfiles.Lockfile(data)
        projectfile = ProjectFile(
            line_ending=DEFAULT_NEWLINES, location=lockfile_path, model=lockfile
        )
        return cls(
            projectfile=projectfile,
            lockfile=lockfile,
            newlines=projectfile.line_ending,
            path=Path(projectfile.location),
        )

    @classmethod
    def load(cls, path, create=True):
        """Create a new lockfile instance.

        :param path: Path to the project root or lockfile
        :type path: str or :class:`pathlib.Path`
        :param bool create: Whether to create the lockfile if missing, defaults to True
        :returns: A new lockfile representing the supplied project paths
        :rtype: :class:`~requirementslib.models.lockfile.Lockfile`
        """

        try:
            projectfile = cls.load_projectfile(path, create=create)
        except JSONDecodeError:
            path = os.path.abspath(path)
            path = Path(
                os.path.join(path, "Pipfile.lock") if os.path.isdir(path) else path
            )
            formatted_path = path.as_posix()
            backup_path = "%s.bak" % formatted_path
            LockfileCorruptException.show(formatted_path, backup_path=backup_path)
            path.rename(backup_path)
            cls.load(formatted_path, create=True)
        lockfile_path = Path(projectfile.location)
        creation_args = {
            "projectfile": projectfile,
            "lockfile": projectfile.model,
            "newlines": projectfile.line_ending,
            "path": lockfile_path,
        }
        return cls(**creation_args)

    @classmethod
    def create(cls, path, create=True):
        return cls.load(path, create=create)

    @property
    def develop(self):
        return self._lockfile.develop

    @property
    def default(self):
        return self._lockfile.default

    def get_requirements(self, dev=True, only=False):
        """Produces a generator which generates requirements from the desired section.

        :param bool dev: Indicates whether to use dev requirements, defaults to False
        :return: Requirements from the relevant pipfile
        :rtype: :class:`~requirementslib.models.requirements.Requirement`
        """

        deps = self.get_deps(dev=dev, only=only)
        for k, v in deps.items():
            yield Requirement.from_pipfile(k, v)

    @property
    def dev_requirements(self):
        if not self._dev_requirements:
            self._dev_requirements = list(self.get_requirements(dev=True, only=True))
        return self._dev_requirements

    @property
    def requirements(self):
        if not self._requirements:
            self._requirements = list(self.get_requirements(dev=False, only=True))
        return self._requirements

    @property
    def dev_requirements_list(self):
        return [{name: entry._data} for name, entry in self._lockfile.develop.items()]

    @property
    def requirements_list(self):
        return [{name: entry._data} for name, entry in self._lockfile.default.items()]

    def write(self):
        self.projectfile.model = copy.deepcopy(self._lockfile)
        self.projectfile.write()

    def as_requirements(self, include_hashes=False, dev=False):
        """Returns a list of requirements in pip-style format."""
        lines = []
        section = self.dev_requirements if dev else self.requirements
        for req in section:
            kwargs = {"include_hashes": include_hashes}
            if req.editable:
                kwargs["include_markers"] = False
            r = req.as_line(**kwargs)
            lines.append(r.strip())
        return lines

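Aside: the intended call pattern for this class, sketched under the assumption that
requirementslib is importable and that "./myproject" (a placeholder path) contains
a Pipfile.lock.

    from requirementslib.models.lockfile import Lockfile

    lockfile = Lockfile.load("./myproject")  # raises OSError if the project is missing
    for line in lockfile.as_requirements(include_hashes=True, dev=False):
        print(line)  # pip-style requirement lines, per as_requirements() above
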
735  Lib/site-packages/pipenv/vendor/requirementslib/models/markers.py  vendored  Normal file
@@ -0,0 +1,735 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import itertools
|
||||
import operator
|
||||
import re
|
||||
|
||||
from pipenv.vendor import attr
|
||||
import distlib.markers
|
||||
import packaging.version
|
||||
import six
|
||||
from packaging.markers import InvalidMarker, Marker
|
||||
from packaging.specifiers import Specifier, SpecifierSet
|
||||
from vistir.compat import Mapping, Set, lru_cache
|
||||
from vistir.misc import dedup
|
||||
|
||||
from ..environment import MYPY_RUNNING
|
||||
from ..exceptions import RequirementError
|
||||
from .utils import filter_none, validate_markers
|
||||
|
||||
from six.moves import reduce # isort:skip
|
||||
|
||||
|
||||
if MYPY_RUNNING:
|
||||
from typing import Optional, List, Type, Any, Tuple, Union, AnyStr, Text, Iterator
|
||||
|
||||
STRING_TYPE = Union[str, bytes, Text]
|
||||
|
||||
|
||||
MAX_VERSIONS = {1: 7, 2: 7, 3: 11, 4: 0}
|
||||
DEPRECATED_VERSIONS = ["3.0", "3.1", "3.2", "3.3"]
|
||||
|
||||
|
||||
def is_instance(item, cls):
    # type: (Any, Type) -> bool
    if isinstance(item, cls) or item.__class__.__name__ == cls.__name__:
        return True
    return False


@attr.s
class PipenvMarkers(object):
    """System-level requirements - see PEP 508 for more detail"""

    os_name = attr.ib(default=None, validator=attr.validators.optional(validate_markers))
    sys_platform = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    platform_machine = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    platform_python_implementation = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    platform_release = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    platform_system = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    platform_version = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    python_version = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    python_full_version = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    implementation_name = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )
    implementation_version = attr.ib(
        default=None, validator=attr.validators.optional(validate_markers)
    )

    @property
    def line_part(self):
        return " and ".join(
            [
                "{0} {1}".format(k, v)
                for k, v in attr.asdict(self, filter=filter_none).items()
            ]
        )

    @property
    def pipfile_part(self):
        return {"markers": self.as_line}

    @classmethod
    def make_marker(cls, marker_string):
        try:
            marker = Marker(marker_string)
        except InvalidMarker:
            raise RequirementError(
                "Invalid requirement: Invalid marker %r" % marker_string
            )
        return marker

    @classmethod
    def from_line(cls, line):
        if ";" in line:
            line = line.rsplit(";", 1)[1].strip()
        marker = cls.make_marker(line)
        return marker

    @classmethod
    def from_pipfile(cls, name, pipfile):
        attr_fields = [field.name for field in attr.fields(cls)]
        found_keys = [k for k in pipfile.keys() if k in attr_fields]
        marker_strings = ["{0} {1}".format(k, pipfile[k]) for k in found_keys]
        if pipfile.get("markers"):
            marker_strings.append(pipfile.get("markers"))
        markers = set()
        for marker in marker_strings:
            markers.add(marker)
        combined_marker = None
        try:
            combined_marker = cls.make_marker(" and ".join(sorted(markers)))
        except RequirementError:
            pass
        else:
            return combined_marker

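# Illustrative use of PipenvMarkers.from_pipfile, a sketch assuming a Pipfile
# entry that pins a package to Windows (the package name is arbitrary):
#
# >>> PipenvMarkers.from_pipfile("requests", {"os_name": "== 'nt'"})
# <Marker('os_name == "nt"')>
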
@lru_cache(maxsize=1024)
def _tuplize_version(version):
    # type: (STRING_TYPE) -> Tuple[int, ...]
    return tuple(int(x) for x in filter(lambda i: i != "*", version.split(".")))


@lru_cache(maxsize=1024)
def _format_version(version):
    # type: (Tuple[int, ...]) -> STRING_TYPE
    if not isinstance(version, six.string_types):
        return ".".join(str(i) for i in version)
    return version


# Prefer [x,y) ranges.
REPLACE_RANGES = {">": ">=", "<=": "<"}

@lru_cache(maxsize=1024)
def _format_pyspec(specifier):
    # type: (Union[STRING_TYPE, Specifier]) -> Specifier
    if isinstance(specifier, str):
        if not any(op in specifier for op in Specifier._operators.keys()):
            specifier = "=={0}".format(specifier)
        specifier = Specifier(specifier)
    version = getattr(specifier, "version", specifier).rstrip()
    if version and version.endswith("*"):
        if version.endswith(".*"):
            version = version[:-2]
        version = version.rstrip("*")
        specifier = Specifier("{0}{1}".format(specifier.operator, version))
    try:
        op = REPLACE_RANGES[specifier.operator]
    except KeyError:
        return specifier
    curr_tuple = _tuplize_version(version)
    try:
        next_tuple = (curr_tuple[0], curr_tuple[1] + 1)
    except IndexError:
        next_tuple = (curr_tuple[0], 1)
    if not next_tuple[1] <= MAX_VERSIONS[next_tuple[0]]:
        if specifier.operator == "<" and curr_tuple[1] <= MAX_VERSIONS[next_tuple[0]]:
            op = "<="
            next_tuple = (next_tuple[0], curr_tuple[1])
        else:
            return specifier
    specifier = Specifier("{0}{1}".format(op, _format_version(next_tuple)))
    return specifier

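# Illustrative behavior of _format_pyspec, a sketch of the half-open range
# rewriting driven by REPLACE_RANGES (outputs assume packaging's Specifier repr):
#
# >>> _format_pyspec(">3.6")      # exclusive lower bound becomes the next minor
# <Specifier('>=3.7')>
# >>> _format_pyspec("<=3.6")     # inclusive upper bound becomes exclusive
# <Specifier('<3.7')>
# >>> _format_pyspec("3.7.*")     # bare versions get "==" and wildcards are dropped
# <Specifier('==3.7')>
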
@lru_cache(maxsize=1024)
def _get_specs(specset):
    if specset is None:
        return
    if is_instance(specset, Specifier):
        new_specset = SpecifierSet()
        specs = set()
        specs.add(specset)
        new_specset._specs = frozenset(specs)
        specset = new_specset
    if isinstance(specset, str):
        specset = SpecifierSet(specset)
    result = []
    for spec in set(specset):
        version = spec.version
        op = spec.operator
        if op in ("in", "not in"):
            versions = version.split(",")
            op = "==" if op == "in" else "!="
            for ver in versions:
                result.append((op, _tuplize_version(ver.strip())))
        else:
            result.append((spec.operator, _tuplize_version(spec.version)))
    return sorted(result, key=operator.itemgetter(1))

# TODO: Rename this to something meaningful
def _group_by_op(specs):
    # type: (Union[Set[Specifier], SpecifierSet]) -> Iterator
    specs = [_get_specs(x) for x in list(specs)]
    flattened = [
        ((op, len(version) > 2), version) for spec in specs for op, version in spec
    ]
    specs = sorted(flattened)
    grouping = itertools.groupby(specs, key=operator.itemgetter(0))
    return grouping

# TODO: rename this to something meaningful
def normalize_specifier_set(specs):
    # type: (Union[str, SpecifierSet]) -> Optional[Set[Specifier]]
    """Given a specifier set, a string, or an iterable, normalize the specifiers.

    .. note:: This function exists largely to deal with ``pyzmq`` which handles
        the ``requires_python`` specifier incorrectly, using ``3.7*`` rather than
        the correct form of ``3.7.*``. This workaround can likely go away if
        we ever introduce enforcement for metadata standards on PyPI.

    :param Union[str, SpecifierSet] specs: Supplied specifiers to normalize
    :return: A new set of specifiers or specifier set
    :rtype: Union[Set[Specifier], :class:`~packaging.specifiers.SpecifierSet`]
    """
    if not specs:
        return None
    if isinstance(specs, set):
        return specs
    # when we aren't dealing with a string at all, we can normalize this as usual
    elif not isinstance(specs, six.string_types):
        return {_format_pyspec(spec) for spec in specs}
    spec_list = []
    for spec in specs.split(","):
        spec = spec.strip()
        if spec.endswith(".*"):
            spec = spec[:-2]
        spec = spec.rstrip("*")
        spec_list.append(spec)
    return normalize_specifier_set(SpecifierSet(",".join(spec_list)))

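# Illustrative normalization, a sketch of the pyzmq workaround described in the
# docstring above (set ordering may vary):
#
# >>> normalize_specifier_set(">=3.7*")                  # malformed wildcard
# {<Specifier('>=3.7')>}
# >>> normalize_specifier_set(">=2.7,!=3.0.*,!=3.1.*")
# {<Specifier('>=2.7')>, <Specifier('!=3.0')>, <Specifier('!=3.1')>}
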
# TODO: Check if this is used by anything public otherwise make it private
# And rename it to something meaningful
def get_sorted_version_string(version_set):
    # type: (Set[AnyStr]) -> AnyStr
    version_list = sorted(
        "{0}".format(_format_version(version)) for version in version_set
    )
    version = ", ".join(version_list)
    return version

# TODO: Rename this to something meaningful
# TODO: Add a deprecation decorator and deprecate this -- I'm sure it's used
# in other libraries
@lru_cache(maxsize=1024)
def cleanup_pyspecs(specs, joiner="or"):
    specs = normalize_specifier_set(specs)
    # for the != operator we want to group by version;
    # if all versions are consecutive, join them as a list
    results = {}
    translation_map = {
        # if we are doing an "or" operation, we need to use the min for >=
        # this way OR(>=2.6, >=2.7, >=3.6) picks >=2.6
        # if we do an "and" operation we need to use max to be more selective
        (">", ">="): {
            "or": lambda x: _format_version(min(x)),
            "and": lambda x: _format_version(max(x)),
        },
        # we use inverse logic here so we will take the max value if we are
        # using "or" but the min value if we are using "and"
        ("<", "<="): {
            "or": lambda x: _format_version(max(x)),
            "and": lambda x: _format_version(min(x)),
        },
        # leave these the same no matter what operator we use
        ("!=", "==", "~=", "==="): {
            "or": get_sorted_version_string,
            "and": get_sorted_version_string,
        },
    }
    op_translations = {
        "!=": lambda x: "not in" if len(x) > 1 else "!=",
        "==": lambda x: "in" if len(x) > 1 else "==",
    }
    translation_keys = list(translation_map.keys())
    for op_and_version_type, versions in _group_by_op(tuple(specs)):
        op = op_and_version_type[0]
        versions = [version[1] for version in versions]
        versions = sorted(dedup(versions))
        op_key = next(iter(k for k in translation_keys if op in k), None)
        version_value = versions
        if op_key is not None:
            version_value = translation_map[op_key][joiner](versions)
        if op in op_translations:
            op = op_translations[op](versions)
        results[(op, op_and_version_type[1])] = version_value
    return sorted([(k[0], v) for k, v in results.items()], key=operator.itemgetter(1))

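# Illustrative output of cleanup_pyspecs, a sketch of how exclusions collapse
# into "not in" groups while lower bounds keep the "or"-style minimum:
#
# >>> cleanup_pyspecs(">=2.7, !=3.0.*, !=3.1.*")
# [('>=', '2.7'), ('not in', '3.0, 3.1')]
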
# TODO: Rename this to something meaningful
@lru_cache(maxsize=1024)
def fix_version_tuple(version_tuple):
    # type: (Tuple[AnyStr, AnyStr]) -> Tuple[AnyStr, AnyStr]
    op, version = version_tuple
    max_major = max(MAX_VERSIONS.keys())
    if version[0] > max_major:
        return (op, (max_major, MAX_VERSIONS[max_major]))
    max_allowed = MAX_VERSIONS[version[0]]
    if op == "<" and version[1] > max_allowed and version[1] - 1 <= max_allowed:
        op = "<="
        version = (version[0], version[1] - 1)
    return (op, version)

# TODO: Rename this to something meaningful, deprecate it (See prior function)
@lru_cache(maxsize=128)
def get_versions(specset, group_by_operator=True):
    # type: (Union[Set[Specifier], SpecifierSet], bool) -> List[Tuple[STRING_TYPE, STRING_TYPE]]
    specs = [_get_specs(x) for x in list(tuple(specset))]
    initial_sort_key = lambda k: (k[0], k[1])
    initial_grouping_key = operator.itemgetter(0)
    if not group_by_operator:
        initial_grouping_key = operator.itemgetter(1)
        initial_sort_key = operator.itemgetter(1)
    version_tuples = sorted(
        set((op, version) for spec in specs for op, version in spec), key=initial_sort_key
    )
    version_tuples = [fix_version_tuple(t) for t in version_tuples]
    op_groups = [
        (grp, list(map(operator.itemgetter(1), keys)))
        for grp, keys in itertools.groupby(version_tuples, key=initial_grouping_key)
    ]
    versions = [
        (op, packaging.version.parse(".".join(str(v) for v in val)))
        for op, vals in op_groups
        for val in vals
    ]
    return sorted(versions, key=operator.itemgetter(1))

def _ensure_marker(marker):
    # type: (Union[STRING_TYPE, Marker]) -> Marker
    if not is_instance(marker, Marker):
        return Marker(str(marker))
    return marker


def gen_marker(mkr):
    # type: (List[STRING_TYPE]) -> Marker
    # Build a throwaway Marker, then swap in the supplied raw marker element.
    m = Marker("python_version == '1'")
    m._markers.pop()
    m._markers.append(mkr)
    return m

def _strip_extra(elements):
    """Remove the "extra == ..." operands from the list."""

    return _strip_marker_elem("extra", elements)


def _strip_pyversion(elements):
    return _strip_marker_elem("python_version", elements)


def _strip_marker_elem(elem_name, elements):
    """Remove the supplied element from the marker.

    This is not a comprehensive implementation, but relies on an important
    characteristic of metadata generation: The element's operand is always
    associated with an "and" operator. This means that we can simply remove the
    operand and the "and" operator associated with it.
    """

    extra_indexes = []
    preceding_operators = ["and"] if elem_name == "extra" else ["and", "or"]
    for i, element in enumerate(elements):
        if isinstance(element, list):
            cancelled = _strip_marker_elem(elem_name, element)
            if cancelled:
                extra_indexes.append(i)
        elif isinstance(element, tuple) and element[0].value == elem_name:
            extra_indexes.append(i)
    for i in reversed(extra_indexes):
        del elements[i]
        if i > 0 and elements[i - 1] in preceding_operators:
            # Remove the "and" before it.
            del elements[i - 1]
        elif elements:
            # This shouldn't ever happen, but is included for completeness.
            # If there is not an "and" before this element, try to remove the
            # operator after it.
            del elements[0]
    return not elements

def _get_stripped_marker(marker, strip_func):
    """Build a new marker which is cleaned according to `strip_func`"""

    if not marker:
        return None
    marker = _ensure_marker(marker)
    elements = marker._markers
    strip_func(elements)
    if elements:
        return marker
    return None


def get_without_extra(marker):
    """Build a new marker without the `extra == ...` part.

    The implementation reaches deep into packaging's internals, but I don't
    have a better way now (except implementing the whole thing myself).

    This could return `None` if the `extra == ...` part is the only one in the
    input marker.
    """

    return _get_stripped_marker(marker, _strip_extra)


def get_without_pyversion(marker):
    """Build a new marker without the `python_version` part.

    This could return `None` if the `python_version` section is the only section in the
    marker.
    """

    return _get_stripped_marker(marker, _strip_pyversion)

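# Illustrative stripping behavior, a sketch using markers built inline:
#
# >>> str(get_without_extra(Marker("os_name == 'nt' and extra == 'socks'")))
# 'os_name == "nt"'
# >>> get_without_extra(Marker("extra == 'socks'")) is None
# True
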
def _markers_collect_extras(markers, collection):
    # Optimization: the marker element is usually appended at the end.
    for el in reversed(markers):
        if isinstance(el, tuple) and el[0].value == "extra" and el[1].value == "==":
            collection.add(el[2].value)
        elif isinstance(el, list):
            _markers_collect_extras(el, collection)


def _markers_collect_pyversions(markers, collection):
    local_collection = []
    marker_format_str = "{0}"
    for i, el in enumerate(reversed(markers)):
        if isinstance(el, tuple) and el[0].value == "python_version":
            new_marker = str(gen_marker(el))
            local_collection.append(marker_format_str.format(new_marker))
        elif isinstance(el, list):
            _markers_collect_pyversions(el, local_collection)
    if local_collection:
        # local_collection = "{0}".format(" ".join(local_collection))
        collection.extend(local_collection)


def _markers_contains_extra(markers):
    # Optimization: the marker element is usually appended at the end.
    return _markers_contains_key(markers, "extra")


def _markers_contains_pyversion(markers):
    return _markers_contains_key(markers, "python_version")


def _markers_contains_key(markers, key):
    for element in reversed(markers):
        if isinstance(element, tuple) and element[0].value == key:
            return True
        elif isinstance(element, list):
            if _markers_contains_key(element, key):
                return True
    return False

@lru_cache(maxsize=128)
def get_contained_extras(marker):
    """Collect "extra == ..." operands from a marker.

    Returns a set of str. Each str is a specified extra in this marker.
    """
    if not marker:
        return set()
    extras = set()
    marker = _ensure_marker(marker)
    _markers_collect_extras(marker._markers, extras)
    return extras

@lru_cache(maxsize=1024)
def get_contained_pyversions(marker):
    """Collect all `python_version` operands from a marker."""

    collection = []
    if not marker:
        return set()
    marker = _ensure_marker(marker)
    # Collect the (Variable, Op, Value) tuples and string joiners from the marker
    _markers_collect_pyversions(marker._markers, collection)
    marker_str = " and ".join(sorted(collection))
    if not marker_str:
        return set()
    # Use the distlib dictionary parser to create a dictionary 'trie' which is a bit
    # easier to reason about
    marker_dict = distlib.markers.parse_marker(marker_str)[0]
    version_set = set()
    pyversions, _ = parse_marker_dict(marker_dict)
    if isinstance(pyversions, set):
        version_set.update(pyversions)
    elif pyversions is not None:
        version_set.add(pyversions)
    # Each distinct element in the set was separated by an "and" operator in the marker,
    # so we need to reduce them with an intersection here rather than a union
    # in order to find the boundaries
    versions = set()
    if version_set:
        versions = reduce(lambda x, y: x & y, version_set)
    return versions

@lru_cache(maxsize=128)
def contains_extra(marker):
    """Check whether a marker contains an "extra == ..." operand."""
    if not marker:
        return False
    marker = _ensure_marker(marker)
    return _markers_contains_extra(marker._markers)


@lru_cache(maxsize=128)
def contains_pyversion(marker):
    """Check whether a marker contains a python_version operand."""

    if not marker:
        return False
    marker = _ensure_marker(marker)
    return _markers_contains_pyversion(marker._markers)

def _split_specifierset_str(specset_str, prefix="=="):
|
||||
# type: (str, str) -> Set[Specifier]
|
||||
"""
|
||||
Take a specifierset string and split it into a list to join for specifier sets
|
||||
|
||||
:param str specset_str: A string containing python versions, often comma separated
|
||||
:param str prefix: A prefix to use when generating the specifier set
|
||||
:return: A list of :class:`Specifier` instances generated with the provided prefix
|
||||
:rtype: Set[Specifier]
|
||||
"""
|
||||
specifiers = set()
|
||||
if "," not in specset_str and " " in specset_str:
|
||||
values = [v.strip() for v in specset_str.split()]
|
||||
else:
|
||||
values = [v.strip() for v in specset_str.split(",")]
|
||||
if prefix == "!=" and any(v in values for v in DEPRECATED_VERSIONS):
|
||||
values += DEPRECATED_VERSIONS[:]
|
||||
for value in sorted(values):
|
||||
specifiers.add(Specifier("{0}{1}".format(prefix, value)))
|
||||
return specifiers
|
||||
|
||||
|
||||
def _get_specifiers_from_markers(marker_item):
    """
    Given a marker item, get specifiers from the version marker

    :param :class:`~packaging.markers.Marker` marker_item: A marker describing a version constraint
    :return: A set of specifiers corresponding to the marker constraint
    :rtype: Set[Specifier]
    """
    specifiers = set()
    if isinstance(marker_item, tuple):
        variable, op, value = marker_item
        if variable.value != "python_version":
            return specifiers
        if op.value == "in":
            specifiers.update(_split_specifierset_str(value.value, prefix="=="))
        elif op.value == "not in":
            specifiers.update(_split_specifierset_str(value.value, prefix="!="))
        else:
            specifiers.add(Specifier("{0}{1}".format(op.value, value.value)))
    elif isinstance(marker_item, list):
        parts = get_specset(marker_item)
        if parts:
            specifiers.update(parts)
    return specifiers

def get_specset(marker_list):
    # type: (List) -> Optional[SpecifierSet]
    specset = set()
    _last_str = "and"
    for marker_parts in marker_list:
        if isinstance(marker_parts, str):
            _last_str = marker_parts  # noqa
        else:
            specset.update(_get_specifiers_from_markers(marker_parts))
    specifiers = SpecifierSet()
    specifiers._specs = frozenset(specset)
    return specifiers

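# Illustrative conversion from parsed marker elements to a specifier set,
# a sketch that feeds a Marker's internal element list straight in:
#
# >>> get_specset(Marker("python_version >= '2.7' and python_version < '4.0'")._markers)
# <SpecifierSet('<4.0,>=2.7')>
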
# TODO: Refactor this (reduce complexity)
def parse_marker_dict(marker_dict):
    op = marker_dict["op"]
    lhs = marker_dict["lhs"]
    rhs = marker_dict["rhs"]
    # This is where the spec sets for each side land if we have an "or" operator
    side_spec_list = []
    side_markers_list = []
    finalized_marker = ""
    # And if we hit the end of the parse tree we use this format string to make a marker
    format_string = "{lhs} {op} {rhs}"
    specset = SpecifierSet()
    specs = set()
    # Essentially we iterate over each side of the parsed marker if either one is
    # a mapping instance (i.e. a dictionary) and recursively parse and reduce the
    # specset, unioning the "and" specs and intersecting the "or"s to find the most
    # appropriate range
    if any(issubclass(type(side), Mapping) for side in (lhs, rhs)):
        for side in (lhs, rhs):
            side_specs = set()
            side_markers = set()
            if issubclass(type(side), Mapping):
                merged_side_specs, merged_side_markers = parse_marker_dict(side)
                side_specs.update(merged_side_specs)
                side_markers.update(merged_side_markers)
            else:
                marker = _ensure_marker(side)
                marker_parts = getattr(marker, "_markers", [])
                if marker_parts[0][0].value == "python_version":
                    side_specs |= set(get_specset(marker_parts))
                else:
                    side_markers.add(str(marker))
            side_spec_list.append(side_specs)
            side_markers_list.append(side_markers)
        if op == "and":
            # When we are "and"-ing things together, it probably makes the most sense
            # to reduce them here into a single PySpec instance
            specs = reduce(lambda x, y: set(x) | set(y), side_spec_list)
            markers = reduce(lambda x, y: set(x) | set(y), side_markers_list)
            if not specs and not markers:
                return specset, finalized_marker
            if markers and isinstance(markers, (tuple, list, Set)):
                finalized_marker = Marker(" and ".join([m for m in markers if m]))
            elif markers:
                finalized_marker = str(markers)
            specset._specs = frozenset(specs)
            return specset, finalized_marker
        # When we "or" things we can likewise turn them into a reduced
        # set using this logic
        sides = reduce(lambda x, y: set(x) & set(y), side_spec_list)
        finalized_marker = " or ".join(
            [normalize_marker_str(m) for m in side_markers_list]
        )
        specset._specs = frozenset(sorted(sides))
        return specset, finalized_marker
    else:
        # At the tip of the tree we are dealing with strings all around and they just
        # need to be smashed together
        specs = set()
        if lhs == "python_version":
            format_string = "{lhs}{op}{rhs}"
        marker = Marker(format_string.format(**marker_dict))
        marker_parts = getattr(marker, "_markers", [])
        _set = get_specset(marker_parts)
        if _set:
            specs |= set(_set)
        specset._specs = frozenset(specs)
        return specset, finalized_marker

def _contains_micro_version(version_string):
    return re.search(r"\d+\.\d+\.\d+", version_string) is not None

def format_pyversion(parts):
    op, val = parts
    version_marker = (
        "python_full_version" if _contains_micro_version(val) else "python_version"
    )
    return "{0} {1} '{2}'".format(version_marker, op, val)

def normalize_marker_str(marker):
    # type: (Union[Marker, STRING_TYPE]) -> str
    marker_str = ""
    if not marker:
        return None
    if not is_instance(marker, Marker):
        marker = _ensure_marker(marker)
    pyversion = get_contained_pyversions(marker)
    marker = get_without_pyversion(marker)
    if pyversion:
        parts = cleanup_pyspecs(pyversion)
        marker_str = " and ".join([format_pyversion(pv) for pv in parts])
    if marker:
        if marker_str:
            marker_str = "{0!s} and {1!s}".format(marker_str, marker)
        else:
            marker_str = "{0!s}".format(marker)
    return marker_str.replace('"', "'")

@lru_cache(maxsize=1024)
def marker_from_specifier(spec):
    # type: (STRING_TYPE) -> Marker
    if not any(spec.startswith(k) for k in Specifier._operators.keys()):
        if spec.strip().lower() in ["any", "<any>", "*"]:
            return None
        spec = "=={0}".format(spec)
    elif spec.startswith("==") and spec.count("=") > 3:
        spec = "=={0}".format(spec.lstrip("="))
    if not spec:
        return None
    marker_segments = []
    for marker_segment in cleanup_pyspecs(spec):
        marker_segments.append(format_pyversion(marker_segment))
    marker_str = " and ".join(marker_segments).replace('"', "'")
    return Marker(marker_str)

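# Illustrative specifier-to-marker conversion, a sketch of the paths above:
#
# >>> str(marker_from_specifier(">=3.6"))
# 'python_version >= "3.6"'
# >>> str(marker_from_specifier("3.7"))        # bare versions are treated as "=="
# 'python_version == "3.7"'
# >>> str(marker_from_specifier(">=3.6.1"))    # micro versions use python_full_version
# 'python_full_version >= "3.6.1"'
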
def merge_markers(m1, m2):
    # type: (Marker, Marker) -> Optional[Marker]
    if not all((m1, m2)):
        return next(iter(v for v in (m1, m2) if v), None)
    m1 = _ensure_marker(m1)
    m2 = _ensure_marker(m2)
    _markers = []  # type: List[Marker]
    for marker in (m1, m2):
        _markers.append(str(marker))
    marker_str = " and ".join([normalize_marker_str(m) for m in _markers if m])
    return _ensure_marker(normalize_marker_str(marker_str))

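# Illustrative merging, a sketch (when either side is falsy the other is
# returned unchanged):
#
# >>> str(merge_markers(Marker("os_name == 'nt'"), Marker("implementation_name == 'cpython'")))
# 'os_name == "nt" and implementation_name == "cpython"'
# >>> merge_markers(Marker("os_name == 'nt'"), None)
# <Marker('os_name == "nt"')>
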
1240
Lib/site-packages/pipenv/vendor/requirementslib/models/metadata.py
vendored
Normal file
File diff suppressed because it is too large
384
Lib/site-packages/pipenv/vendor/requirementslib/models/pipfile.py
vendored
Normal file
@@ -0,0 +1,384 @@
# -*- coding: utf-8 -*-

from __future__ import absolute_import, print_function, unicode_literals

import copy
import itertools
import os
import sys

from pipenv.vendor import attr
import plette.models.base
import plette.pipfiles
import tomlkit
from vistir.compat import FileNotFoundError, Path

from ..environment import MYPY_RUNNING
from ..exceptions import RequirementError
from ..utils import is_editable, is_vcs, merge_items
from .project import ProjectFile
from .requirements import Requirement
from .utils import get_url_name, optional_instance_of, tomlkit_value_to_python

if MYPY_RUNNING:
    from typing import Union, Any, Dict, Iterable, Mapping, List, Text

    package_type = Dict[Text, Dict[Text, Union[List[Text], Text]]]
    source_type = Dict[Text, Union[Text, bool]]
    sources_type = Iterable[source_type]
    meta_type = Dict[Text, Union[int, Dict[Text, Text], sources_type]]
    lockfile_type = Dict[Text, Union[package_type, meta_type]]


is_pipfile = optional_instance_of(plette.pipfiles.Pipfile)
is_path = optional_instance_of(Path)
is_projectfile = optional_instance_of(ProjectFile)

def reorder_source_keys(data):
    # type: (tomlkit.toml_document.TOMLDocument) -> tomlkit.toml_document.TOMLDocument
    sources = []  # type: sources_type
    for source_key in ["source", "sources"]:
        sources.extend(data.get(source_key, tomlkit.aot()).value)
    new_source_aot = tomlkit.aot()
    for entry in sources:
        table = tomlkit.table()  # type: tomlkit.items.Table
        source_entry = PipfileLoader.populate_source(entry.copy())
        for key in ["name", "url", "verify_ssl"]:
            table.update({key: source_entry[key]})
        new_source_aot.append(table)
    data["source"] = new_source_aot
    if data.get("sources", None):
        del data["sources"]
    return data

class PipfileLoader(plette.pipfiles.Pipfile):
    @classmethod
    def validate(cls, data):
        # type: (tomlkit.toml_document.TOMLDocument) -> None
        for key, klass in plette.pipfiles.PIPFILE_SECTIONS.items():
            if key not in data or key == "sources":
                continue
            try:
                klass.validate(data[key])
            except Exception:
                pass

    @classmethod
    def ensure_package_sections(cls, data):
        # type: (tomlkit.toml_document.TOMLDocument[Text, Any]) -> tomlkit.toml_document.TOMLDocument[Text, Any]
        """
        Ensure that all pipfile package sections are present in the given toml document

        :param :class:`~tomlkit.toml_document.TOMLDocument` data: The toml document to
            ensure package sections are present on
        :return: The updated toml document, ensuring ``packages`` and ``dev-packages``
            sections are present
        :rtype: :class:`~tomlkit.toml_document.TOMLDocument`
        """
        package_keys = (
            k for k in plette.pipfiles.PIPFILE_SECTIONS.keys() if k.endswith("packages")
        )
        for key in package_keys:
            if key not in data:
                data.update({key: tomlkit.table()})
        return data

    @classmethod
    def populate_source(cls, source):
        """Derive missing values of source from the existing fields."""
        # Only the URL parameter is mandatory, so let the KeyError be thrown.
        if "name" not in source:
            source["name"] = get_url_name(source["url"])
        if "verify_ssl" not in source:
            source["verify_ssl"] = "https://" in source["url"]
        if not isinstance(source["verify_ssl"], bool):
            source["verify_ssl"] = str(source["verify_ssl"]).lower() == "true"
        return source

    @classmethod
    def load(cls, f, encoding=None):
        # type: (Any, Text) -> PipfileLoader
        content = f.read()
        if encoding is not None:
            content = content.decode(encoding)
        _data = tomlkit.loads(content)
        should_reload = "source" not in _data
        _data = reorder_source_keys(_data)
        if should_reload:
            if "sources" in _data:
                content = tomlkit.dumps(_data)
            else:
                # HACK: There is no good way to prepend a section to an existing
                # TOML document, but there's no good way to copy non-structural
                # content from one TOML document to another either. Modify the
                # TOML content directly, and load the new in-memory document.
                sep = "" if content.startswith("\n") else "\n"
                content = plette.pipfiles.DEFAULT_SOURCE_TOML + sep + content
        data = tomlkit.loads(content)
        data = cls.ensure_package_sections(data)
        instance = cls(data)
        instance._data = dict(instance._data)
        return instance

    def __contains__(self, key):
        # type: (Text) -> bool
        if key not in self._data:
            package_keys = self._data.get("packages", {}).keys()
            dev_package_keys = self._data.get("dev-packages", {}).keys()
            return any(key in pkg_list for pkg_list in (package_keys, dev_package_keys))
        return True

    def __getattribute__(self, key):
        # type: (Text) -> Any
        if key == "source":
            return self._data[key]
        return super(PipfileLoader, self).__getattribute__(key)

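# Illustrative source population, a sketch (the derived "name" comes from
# get_url_name, assumed here to return the host part of the URL):
#
# >>> PipfileLoader.populate_source({"url": "https://pypi.org/simple"})
# {'url': 'https://pypi.org/simple', 'name': 'pypi.org', 'verify_ssl': True}
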
@attr.s(slots=True)
class Pipfile(object):
    path = attr.ib(validator=is_path, type=Path)
    projectfile = attr.ib(validator=is_projectfile, type=ProjectFile)
    _pipfile = attr.ib(type=PipfileLoader)
    _pyproject = attr.ib(
        default=attr.Factory(tomlkit.document), type=tomlkit.toml_document.TOMLDocument
    )
    build_system = attr.ib(default=attr.Factory(dict), type=dict)
    _requirements = attr.ib(default=attr.Factory(list), type=list)
    _dev_requirements = attr.ib(default=attr.Factory(list), type=list)

    @path.default
    def _get_path(self):
        # type: () -> Path
        return Path(os.curdir).absolute()

    @projectfile.default
    def _get_projectfile(self):
        # type: () -> ProjectFile
        return self.load_projectfile(os.curdir, create=False)

    @_pipfile.default
    def _get_pipfile(self):
        # type: () -> Union[plette.pipfiles.Pipfile, PipfileLoader]
        return self.projectfile.model

    @property
    def root(self):
        return self.path.parent

    @property
    def extended_keys(self):
        return [
            k
            for k in itertools.product(
                ("packages", "dev-packages"), ("", "vcs", "editable")
            )
        ]

    @property
    def pipfile(self):
        # type: () -> Union[PipfileLoader, plette.pipfiles.Pipfile]
        return self._pipfile

    def get_deps(self, dev=False, only=True):
        # type: (bool, bool) -> Dict[Text, Dict[Text, Union[List[Text], Text]]]
        deps = {}  # type: Dict[Text, Dict[Text, Union[List[Text], Text]]]
        if dev:
            deps.update(dict(self.pipfile._data.get("dev-packages", {})))
            if only:
                return deps
        return tomlkit_value_to_python(
            merge_items([deps, dict(self.pipfile._data.get("packages", {}))])
        )

    def get(self, k):
        # type: (Text) -> Any
        return self.__getitem__(k)

    def __contains__(self, k):
        # type: (Text) -> bool
        check_pipfile = k in self.extended_keys or self.pipfile.__contains__(k)
        if check_pipfile:
            return True
        return False

    def __getitem__(self, k, *args, **kwargs):
        # type: ignore
        retval = None
        pipfile = self._pipfile
        section = None
        pkg_type = None
        try:
            retval = pipfile[k]
        except KeyError:
            if "-" in k:
                section, _, pkg_type = k.rpartition("-")
                vals = getattr(pipfile.get(section, {}), "_data", {})
                vals = tomlkit_value_to_python(vals)
                if pkg_type == "vcs":
                    retval = {k: v for k, v in vals.items() if is_vcs(v)}
                elif pkg_type == "editable":
                    retval = {k: v for k, v in vals.items() if is_editable(v)}
            if retval is None:
                raise
        else:
            retval = getattr(retval, "_data", retval)
        return retval

    def __getattr__(self, k, *args, **kwargs):
        # type: ignore
        retval = None
        pipfile = super(Pipfile, self).__getattribute__("_pipfile")
        try:
            retval = super(Pipfile, self).__getattribute__(k)
        except AttributeError:
            retval = getattr(pipfile, k, None)
        if retval is not None:
            return retval
        return super(Pipfile, self).__getattribute__(k, *args, **kwargs)

    @property
    def requires_python(self):
        # type: () -> bool
        return getattr(
            self._pipfile.requires,
            "python_version",
            getattr(self._pipfile.requires, "python_full_version", None),
        )

    @property
    def allow_prereleases(self):
        # type: () -> bool
        return self._pipfile.get("pipenv", {}).get("allow_prereleases", False)

    @classmethod
    def read_projectfile(cls, path):
        # type: (Text) -> ProjectFile
        """Read the specified project file and provide an interface for writing/updating.

        :param Text path: Path to the target file.
        :return: A project file with the model and location for interaction
        :rtype: :class:`~requirementslib.models.project.ProjectFile`
        """
        pf = ProjectFile.read(path, PipfileLoader, invalid_ok=True)
        return pf

    @classmethod
    def load_projectfile(cls, path, create=False):
        # type: (Text, bool) -> ProjectFile
        """
        Given a path, load or create the necessary pipfile.

        :param Text path: Path to the project root or pipfile
        :param bool create: Whether to create the pipfile if not found, defaults to False
        :raises OSError: Thrown if the project root directory doesn't exist
        :raises FileNotFoundError: Thrown if the pipfile doesn't exist and ``create=False``
        :return: A project file instance for the supplied project
        :rtype: :class:`~requirementslib.models.project.ProjectFile`
        """
        if not path:
            raise RuntimeError("Must pass a path to classmethod 'Pipfile.load'")
        if not isinstance(path, Path):
            path = Path(path).absolute()
        pipfile_path = path if path.is_file() else path.joinpath("Pipfile")
        project_path = pipfile_path.parent
        if not project_path.exists():
            raise FileNotFoundError("%s is not a valid project path!" % path)
        elif not pipfile_path.exists() or not pipfile_path.is_file():
            if not create:
                raise RequirementError("%s is not a valid Pipfile" % pipfile_path)
        return cls.read_projectfile(pipfile_path.as_posix())

    @classmethod
    def load(cls, path, create=False):
        # type: (Text, bool) -> Pipfile
        """
        Given a path, load or create the necessary pipfile.

        :param Text path: Path to the project root or pipfile
        :param bool create: Whether to create the pipfile if not found, defaults to False
        :raises OSError: Thrown if the project root directory doesn't exist
        :raises FileNotFoundError: Thrown if the pipfile doesn't exist and ``create=False``
        :return: A pipfile instance pointing at the supplied project
        :rtype: :class:`~requirementslib.models.pipfile.Pipfile`
        """

        projectfile = cls.load_projectfile(path, create=create)
        pipfile = projectfile.model
        creation_args = {
            "projectfile": projectfile,
            "pipfile": pipfile,
            "path": Path(projectfile.location),
        }
        return cls(**creation_args)

    def write(self):
        # type: () -> None
        self.projectfile.model = copy.deepcopy(self._pipfile)
        self.projectfile.write()

    @property
    def dev_packages(self):
        # type: () -> List[Requirement]
        return self.dev_requirements

    @property
    def packages(self):
        # type: () -> List[Requirement]
        return self.requirements

    @property
    def dev_requirements(self):
        # type: () -> List[Requirement]
        if not self._dev_requirements:
            packages = tomlkit_value_to_python(self.pipfile.get("dev-packages", {}))
            self._dev_requirements = [
                Requirement.from_pipfile(k, v)
                for k, v in packages.items()
                if v is not None
            ]
        return self._dev_requirements

    @property
    def requirements(self):
        # type: () -> List[Requirement]
        if not self._requirements:
            packages = tomlkit_value_to_python(self.pipfile.get("packages", {}))
            self._requirements = [
                Requirement.from_pipfile(k, v)
                for k, v in packages.items()
                if v is not None
            ]
        return self._requirements

    def _read_pyproject(self):
        # type: () -> None
        pyproject = self.path.parent.joinpath("pyproject.toml")
        if pyproject.exists():
            self._pyproject = tomlkit.loads(pyproject.read_text())
            build_system = self._pyproject.get("build-system", None)
            if build_system and not build_system.get("build-backend"):
                build_system["build-backend"] = "setuptools.build_meta:__legacy__"
            elif not build_system or not build_system.get("requires"):
                build_system = {
                    "requires": ["setuptools>=40.8", "wheel"],
                    "build-backend": "setuptools.build_meta:__legacy__",
                }
            self.build_system = build_system

    @property
    def build_requires(self):
        # type: () -> List[Text]
        if not self.build_system:
            self._read_pyproject()
        return self.build_system.get("requires", [])

    @property
    def build_backend(self):
        # type: () -> Text
        if not self.build_system:
            self._read_pyproject()
        return self.build_system.get("build-backend", None)

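# Illustrative loading of the model above, a sketch (the project path is
# hypothetical):
#
# >>> pf = Pipfile.load("/path/to/project")     # reads <root>/Pipfile
# >>> [r.name for r in pf.requirements]         # default packages as Requirement objects
# >>> pf.get_deps(dev=True, only=False)         # merged dev + default dependencies
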
234
Lib/site-packages/pipenv/vendor/requirementslib/models/project.py
vendored
Normal file
@@ -0,0 +1,234 @@
# -*- coding=utf-8 -*-

from __future__ import absolute_import, print_function, unicode_literals

import collections
import io
import os

from pipenv.vendor import attr
import packaging.markers
import packaging.utils
import plette
import plette.models
import six
import tomlkit
from vistir.compat import FileNotFoundError

SectionDifference = collections.namedtuple("SectionDifference", ["inthis", "inthat"])
FileDifference = collections.namedtuple("FileDifference", ["default", "develop"])

def _are_pipfile_entries_equal(a, b):
    a = {k: v for k, v in a.items() if k not in ("markers", "hashes", "hash")}
    b = {k: v for k, v in b.items() if k not in ("markers", "hashes", "hash")}
    if a != b:
        return False
    try:
        marker_eval_a = packaging.markers.Marker(a["markers"]).evaluate()
    except (AttributeError, KeyError, TypeError, ValueError):
        marker_eval_a = True
    try:
        marker_eval_b = packaging.markers.Marker(b["markers"]).evaluate()
    except (AttributeError, KeyError, TypeError, ValueError):
        marker_eval_b = True
    return marker_eval_a == marker_eval_b

DEFAULT_NEWLINES = "\n"


def preferred_newlines(f):
    if isinstance(f.newlines, six.text_type):
        return f.newlines
    return DEFAULT_NEWLINES


@attr.s
class ProjectFile(object):
    """A file in the Pipfile project."""

    location = attr.ib()
    line_ending = attr.ib()
    model = attr.ib()

    @classmethod
    def read(cls, location, model_cls, invalid_ok=False):
        if not os.path.exists(location) and not invalid_ok:
            raise FileNotFoundError(location)
        try:
            with io.open(location, encoding="utf-8") as f:
                model = model_cls.load(f)
                line_ending = preferred_newlines(f)
        except Exception:
            if not invalid_ok:
                raise
            model = None
            line_ending = DEFAULT_NEWLINES
        return cls(location=location, line_ending=line_ending, model=model)

    def write(self):
        kwargs = {"encoding": "utf-8", "newline": self.line_ending}
        with io.open(self.location, "w", **kwargs) as f:
            self.model.dump(f)

    def dumps(self):
        strio = six.StringIO()
        self.model.dump(strio)
        return strio.getvalue()

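# Illustrative round-trip through ProjectFile, a sketch (the path is
# hypothetical):
#
# >>> pf = ProjectFile.read("/path/to/Pipfile", plette.Pipfile)
# >>> pf.line_ending                # newline style detected on read, reused on write
# >>> pf.write()                    # serializes pf.model back to the same location
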
@attr.s
class Project(object):

    root = attr.ib()
    _p = attr.ib(init=False)
    _l = attr.ib(init=False)

    def __attrs_post_init__(self):
        self.root = root = os.path.abspath(self.root)
        self._p = ProjectFile.read(os.path.join(root, "Pipfile"), plette.Pipfile)
        self._l = ProjectFile.read(
            os.path.join(root, "Pipfile.lock"), plette.Lockfile, invalid_ok=True
        )

    @property
    def pipfile(self):
        return self._p.model

    @property
    def pipfile_location(self):
        return self._p.location

    @property
    def lockfile(self):
        return self._l.model

    @property
    def lockfile_location(self):
        return self._l.location

    @lockfile.setter
    def lockfile(self, new):
        self._l.model = new

    def is_synced(self):
        return self.lockfile and self.lockfile.is_up_to_date(self.pipfile)

    def _get_pipfile_section(self, develop, insert=True):
        name = "dev-packages" if develop else "packages"
        try:
            section = self.pipfile[name]
        except KeyError:
            section = plette.models.PackageCollection(tomlkit.table())
            if insert:
                self.pipfile[name] = section
        return section

    def contains_key_in_pipfile(self, key):
        sections = [
            self._get_pipfile_section(develop=False, insert=False),
            self._get_pipfile_section(develop=True, insert=False),
        ]
        return any(
            (
                packaging.utils.canonicalize_name(name)
                == packaging.utils.canonicalize_name(key)
            )
            for section in sections
            for name in section
        )

    def add_line_to_pipfile(self, line, develop):
        from requirementslib import Requirement

        requirement = Requirement.from_line(line)
        section = self._get_pipfile_section(develop=develop)
        key = requirement.normalized_name
        entry = next(iter(requirement.as_pipfile().values()))
        if isinstance(entry, dict):
            # HACK: TOMLKit prefers to expand tables by default, but we
            # always want inline tables here. Also tomlkit.inline_table
            # does not have `update()`.
            table = tomlkit.inline_table()
            for k, v in entry.items():
                table[k] = v
            entry = table
        section[key] = entry

    def remove_keys_from_pipfile(self, keys, default, develop):
        keys = {packaging.utils.canonicalize_name(key) for key in keys}
        sections = []
        if default:
            sections.append(self._get_pipfile_section(develop=False, insert=False))
        if develop:
            sections.append(self._get_pipfile_section(develop=True, insert=False))
        for section in sections:
            removals = set()
            for name in section:
                if packaging.utils.canonicalize_name(name) in keys:
                    removals.add(name)
            for key in removals:
                del section._data[key]

    def remove_keys_from_lockfile(self, keys):
        keys = {packaging.utils.canonicalize_name(key) for key in keys}
        removed = False
        for section_name in ("default", "develop"):
            try:
                section = self.lockfile[section_name]
            except KeyError:
                continue
            removals = set()
            for name in section:
                if packaging.utils.canonicalize_name(name) in keys:
                    removals.add(name)
            removed = removed or bool(removals)
            for key in removals:
                del section._data[key]

        if removed:
            # HACK: The lock file no longer represents the Pipfile at this
            # point. Set the hash to an arbitrary invalid value.
            self.lockfile.meta.hash = plette.models.Hash({"__invalid__": ""})

    def difference_lockfile(self, lockfile):
        """Generate a difference between the current and given lockfiles.

        Returns a 2-tuple containing differences in the default and develop
        sections.

        Each element is a 2-tuple of dicts. The first, `inthis`, contains
        entries only present in the current lockfile; the second, `inthat`,
        contains entries only present in the given one.

        If a key exists in both this and that, but the values differ, the key
        is present in both dicts, pointing to values from each file.
        """
        diff_data = {
            "default": SectionDifference({}, {}),
            "develop": SectionDifference({}, {}),
        }
        for section_name, section_diff in diff_data.items():
            try:
                this = self.lockfile[section_name]._data
            except (KeyError, TypeError):
                this = {}
            try:
                that = lockfile[section_name]._data
            except (KeyError, TypeError):
                that = {}
            for key, this_value in this.items():
                try:
                    that_value = that[key]
                except KeyError:
                    section_diff.inthis[key] = this_value
                    continue
                if not _are_pipfile_entries_equal(this_value, that_value):
                    section_diff.inthis[key] = this_value
                    section_diff.inthat[key] = that_value
            for key, that_value in that.items():
                if key not in this:
                    section_diff.inthat[key] = that_value
        return FileDifference(**diff_data)

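# Illustrative diffing of lockfiles, a sketch (the path and the comparison
# lockfile are hypothetical):
#
# >>> project = Project("/path/to/project")
# >>> diff = project.difference_lockfile(other_lockfile)
# >>> diff.default.inthis       # entries only in (or differing in) this lockfile
# >>> diff.develop.inthat       # entries only in (or differing in) the other one
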
3154
Lib/site-packages/pipenv/vendor/requirementslib/models/requirements.py
vendored
Normal file
File diff suppressed because it is too large
255
Lib/site-packages/pipenv/vendor/requirementslib/models/resolvers.py
vendored
Normal file
@@ -0,0 +1,255 @@
# -*- coding=utf-8 -*-
from contextlib import contextmanager

from pipenv.vendor import attr
import six
from pip_shims.shims import Wheel

from .cache import HashCache
from .utils import format_requirement, is_pinned_requirement, version_from_ireq

class ResolutionError(Exception):
    pass


@attr.s
class DependencyResolver(object):
    pinned_deps = attr.ib(default=attr.Factory(dict))
    #: A dictionary of abstract dependencies by name
    dep_dict = attr.ib(default=attr.Factory(dict))
    #: A dictionary of sets of version numbers currently valid for each candidate
    candidate_dict = attr.ib(default=attr.Factory(dict))
    #: A historical record of pins
    pin_history = attr.ib(default=attr.Factory(dict))
    #: Whether to allow prerelease dependencies
    allow_prereleases = attr.ib(default=False)
    #: Stores hashes for each dependency
    hashes = attr.ib(default=attr.Factory(dict))
    #: A hash cache
    hash_cache = attr.ib(default=attr.Factory(HashCache))
    #: A finder for searching the index
    finder = attr.ib(default=None)
    #: Whether to include hashes even from incompatible wheels
    include_incompatible_hashes = attr.ib(default=True)
    #: A cache for storing available candidates when using all wheels
    _available_candidates_cache = attr.ib(default=attr.Factory(dict))

    @classmethod
    def create(cls, finder=None, allow_prereleases=False, get_all_hashes=True):
        if not finder:
            from .dependencies import get_finder

            finder_args = []
            if allow_prereleases:
                finder_args.append("--pre")
            finder = get_finder(*finder_args)
        creation_kwargs = {
            "allow_prereleases": allow_prereleases,
            "include_incompatible_hashes": get_all_hashes,
            "finder": finder,
            "hash_cache": HashCache(),
        }
        resolver = cls(**creation_kwargs)
        return resolver

    @property
    def dependencies(self):
        return list(self.dep_dict.values())

    @property
    def resolution(self):
        return list(self.pinned_deps.values())

    def add_abstract_dep(self, dep):
        """Add an abstract dependency by either creating a new entry or
        merging with an old one.

        :param dep: An abstract dependency to add
        :type dep: :class:`~requirementslib.models.dependency.AbstractDependency`
        :raises ResolutionError: Raised when the given dependency is not compatible with
            an existing abstract dependency.
        """

        if dep.name in self.dep_dict:
            compatible_versions = self.dep_dict[dep.name].compatible_versions(dep)
            if compatible_versions:
                self.candidate_dict[dep.name] = compatible_versions
                self.dep_dict[dep.name] = self.dep_dict[dep.name].compatible_abstract_dep(
                    dep
                )
            else:
                raise ResolutionError
        else:
            self.candidate_dict[dep.name] = dep.version_set
            self.dep_dict[dep.name] = dep

    def pin_deps(self):
        """Pins the current abstract dependencies and adds them to the history dict.

        Adds any new dependencies to the abstract dependencies already present by
        merging them together to form new, compatible abstract dependencies.
        """

        for name in list(self.dep_dict.keys()):
            candidates = self.dep_dict[name].candidates[:]
            abs_dep = self.dep_dict[name]
            while candidates:
                pin = candidates.pop()
                # Move on from existing pins if the new pin isn't compatible
                if name in self.pinned_deps:
                    if self.pinned_deps[name].editable:
                        continue
                    old_version = version_from_ireq(self.pinned_deps[name])
                    if not pin.editable:
                        new_version = version_from_ireq(pin)
                        if (
                            new_version != old_version
                            and new_version not in self.candidate_dict[name]
                        ):
                            continue
                pin.parent = abs_dep.parent
                pin_subdeps = self.dep_dict[name].get_deps(pin)
                backup = self.dep_dict.copy(), self.candidate_dict.copy()
                try:
                    for pin_dep in pin_subdeps:
                        self.add_abstract_dep(pin_dep)
                except ResolutionError:
                    self.dep_dict, self.candidate_dict = backup
                    continue
                else:
                    self.pinned_deps[name] = pin
                    break

    def resolve(self, root_nodes, max_rounds=20):
        """Resolves dependencies using a backtracking resolver and multiple endpoints.

        Note: this resolver caches aggressively.
        Runs for *max_rounds* or until any two pinning rounds yield the same outcome.

        :param root_nodes: A list of the root requirements.
        :type root_nodes: list[:class:`~requirementslib.models.requirements.Requirement`]
        :param max_rounds: The max number of resolution rounds, defaults to 20
        :type max_rounds: int, optional
        :raises RuntimeError: Raised when max rounds is exceeded without a resolution.
        """
        if self.dep_dict:
            raise RuntimeError("Do not use the same resolver more than once")

        if not self.hash_cache:
            self.hash_cache = HashCache()

        # Coerce input into AbstractDependency instances.
        # We accept str, Requirement, and AbstractDependency as input.
        from .dependencies import AbstractDependency
        from ..utils import log

        for dep in root_nodes:
            if isinstance(dep, six.string_types):
                dep = AbstractDependency.from_string(dep)
            elif not isinstance(dep, AbstractDependency):
                dep = AbstractDependency.from_requirement(dep)
            self.add_abstract_dep(dep)

        for round_ in range(max_rounds):
            self.pin_deps()
            self.pin_history[round_] = self.pinned_deps.copy()

            if round_ > 0:
                previous_round = set(self.pin_history[round_ - 1].values())
                current_values = set(self.pin_history[round_].values())
                difference = current_values - previous_round
            else:
                difference = set(self.pin_history[round_].values())

            log.debug("\n")
            log.debug("{:=^30}".format(" Round {0} ".format(round_)))
            log.debug("\n")
            if difference:
                log.debug("New Packages: ")
                for d in difference:
                    log.debug("{:>30}".format(format_requirement(d)))
            elif round_ >= 3:
                log.debug("Stable Pins: ")
                for d in current_values:
                    log.debug("{:>30}".format(format_requirement(d)))
                return
            else:
                log.debug("No New Packages.")
        # TODO: Raise a better error.
        raise RuntimeError("cannot resolve after {} rounds".format(max_rounds))

    def get_hashes(self):
        for dep in self.pinned_deps.values():
            if dep.name not in self.hashes:
                self.hashes[dep.name] = self.get_hashes_for_one(dep)
        return self.hashes.copy()

    def get_hashes_for_one(self, ireq):
        if not self.finder:
            from .dependencies import get_finder

            finder_args = []
            if self.allow_prereleases:
                finder_args.append("--pre")
            self.finder = get_finder(*finder_args)

        if ireq.editable:
            return set()

        from pip_shims import VcsSupport

        vcs = VcsSupport()
        if (
            ireq.link
            and ireq.link.scheme in vcs.all_schemes
            and "ssh" in ireq.link.scheme
        ):
            return set()

        if not is_pinned_requirement(ireq):
            raise TypeError("Expected pinned requirement, got {}".format(ireq))

        matching_candidates = set()
        with self.allow_all_wheels():
            from .dependencies import find_all_matches

            matching_candidates = find_all_matches(
                self.finder, ireq, pre=self.allow_prereleases
            )

        return {
            self.hash_cache.get_hash(
                getattr(candidate, "location", getattr(candidate, "link", None))
            )
            for candidate in matching_candidates
        }

    @contextmanager
    def allow_all_wheels(self):
        """
        Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.

        This also saves the candidate cache and sets a new one, or else the results from
        the previous non-patched calls will interfere.
        """

        def _wheel_supported(self, tags=None):
            # Ignore current platform. Support everything.
            return True

        def _wheel_support_index_min(self, tags=None):
            # All wheels are equal priority for sorting.
            return 0

        original_wheel_supported = Wheel.supported
        original_support_index_min = Wheel.support_index_min

        Wheel.supported = _wheel_supported
        Wheel.support_index_min = _wheel_support_index_min

        try:
            yield
        finally:
            Wheel.supported = original_wheel_supported
            Wheel.support_index_min = original_support_index_min

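# Illustrative resolution run, a sketch (the requirement strings are arbitrary
# and a reachable package index is required):
#
# >>> resolver = DependencyResolver.create(allow_prereleases=False)
# >>> resolver.resolve(["requests>=2.20", "six"], max_rounds=20)
# >>> pins = resolver.resolution        # the pinned candidates
# >>> hashes = resolver.get_hashes()    # {name: {hash, ...}} for each pin
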
1961
Lib/site-packages/pipenv/vendor/requirementslib/models/setup_info.py
vendored
Normal file
File diff suppressed because it is too large
506
Lib/site-packages/pipenv/vendor/requirementslib/models/url.py
vendored
Normal file
@@ -0,0 +1,506 @@
# -*- coding=utf-8 -*-
from __future__ import absolute_import, print_function

from pipenv.vendor import attr
import pip_shims.shims
from orderedmultidict import omdict
from six.moves.urllib.parse import quote, unquote_plus, unquote as url_unquote
from urllib3 import util as urllib3_util
from urllib3.util import parse_url as urllib3_parse
from urllib3.util.url import Url

from ..environment import MYPY_RUNNING
from ..utils import is_installable_file
from .utils import extras_to_string, parse_extras

if MYPY_RUNNING:
    from typing import Dict, List, Optional, Text, Tuple, TypeVar, Union
    from pip_shims.shims import Link
    from vistir.compat import Path

    _T = TypeVar("_T")
    STRING_TYPE = Union[bytes, str, Text]
    S = TypeVar("S", bytes, str, Text)



def _get_parsed_url(url):
    # type: (S) -> Url
    """This is a stand-in function for `urllib3.util.parse_url`

    The original function doesn't handle special characters very well; this simply
    splits out the authentication section, creates the parsed url, then puts the
    authentication section back in, bypassing validation.

    :return: The new, parsed URL object
    :rtype: :class:`~urllib3.util.url.Url`
    """

    try:
        parsed = urllib3_parse(url)
    except ValueError:
        scheme, _, url = url.partition("://")
        auth, _, url = url.rpartition("@")
        url = "{scheme}://{url}".format(scheme=scheme, url=url)
        parsed = urllib3_parse(url)._replace(auth=auth)
    if parsed.auth:
        return parsed._replace(auth=url_unquote(parsed.auth))
    return parsed
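
# Editor's note, an illustrative call (not part of the vendored module): the
# auth section comes back url-unquoted, so a percent-encoded username such as
# "user%40corp" should round-trip to its literal form, roughly:
#
#   _get_parsed_url("https://user%40corp:pass@host.example.com/simple")
#   # -> Url whose .auth == "user@corp:pass"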


def remove_password_from_url(url):
    # type: (S) -> S
    """Given a url, remove the password and insert 4 dashes.

    :param url: The url to replace the authentication in
    :type url: S
    :return: The new URL without authentication
    :rtype: S
    """

    parsed = _get_parsed_url(url)
    if parsed.auth:
        auth, _, _ = parsed.auth.partition(":")
        return parsed._replace(auth="{auth}:----".format(auth=auth)).url
    return parsed.url
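
# Editor's note, an illustrative call (not part of the vendored module):
#
#   remove_password_from_url("https://user:s3cret@pypi.example.com/simple")
#   # -> "https://user:----@pypi.example.com/simple"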


@attr.s(hash=True)
class URI(object):
    #: The target hostname, e.g. `amazon.com`
    host = attr.ib(type=str)
    #: The URI Scheme, e.g. `salesforce`
    scheme = attr.ib(default="https", type=str)
    #: The numeric port of the url if specified
    port = attr.ib(default=None, type=int)
    #: The url path, e.g. `/path/to/endpoint`
    path = attr.ib(default="", type=str)
    #: Query parameters, e.g. `?variable=value...`
    query = attr.ib(default="", type=str)
    #: URL Fragments, e.g. `#fragment=value`
    fragment = attr.ib(default="", type=str)
    #: Subdirectory fragment, e.g. `&subdirectory=blah...`
    subdirectory = attr.ib(default="", type=str)
    #: VCS ref this URI points at, if available
    ref = attr.ib(default="", type=str)
    #: The username if provided, parsed from `user:password@hostname`
    username = attr.ib(default="", type=str)
    #: Password parsed from `user:password@hostname`
    password = attr.ib(default="", type=str, repr=False)
    #: An orderedmultidict representing query fragments
    query_dict = attr.ib(factory=omdict, type=omdict)
    #: The name of the specified package in case it is a VCS URI with an egg fragment
    name = attr.ib(default="", type=str)
    #: Any extras requested from the requirement
    extras = attr.ib(factory=tuple, type=tuple)
    #: Whether the url was parsed as a direct pep508-style URL
    is_direct_url = attr.ib(default=False, type=bool)
    #: Whether the url was an implicit `git+ssh` url (passed as `git+git@`)
    is_implicit_ssh = attr.ib(default=False, type=bool)
    _auth = attr.ib(default=None, type=str, repr=False)
    _fragment_dict = attr.ib(factory=dict, type=dict)
    _username_is_quoted = attr.ib(type=bool, default=False)
    _password_is_quoted = attr.ib(type=bool, default=False)

    def _parse_query(self):
        # type: () -> URI
        query = self.query if self.query is not None else ""
        query_dict = omdict()
        queries = query.split("&")
        query_items = []
        subdirectory = self.subdirectory if self.subdirectory else None
        for q in queries:
            key, _, val = q.partition("=")
            val = unquote_plus(val)
            if key == "subdirectory" and not subdirectory:
                subdirectory = val
            else:
                query_items.append((key, val))
        query_dict.load(query_items)
        return attr.evolve(
            self, query_dict=query_dict, subdirectory=subdirectory, query=query
        )

    def _parse_fragment(self):
        # type: () -> URI
        subdirectory = self.subdirectory if self.subdirectory else ""
        fragment = self.fragment if self.fragment else ""
        if self.fragment is None:
            return self
        fragments = self.fragment.split("&")
        fragment_items = {}
        name = self.name if self.name else ""
        extras = self.extras
        for q in fragments:
            key, _, val = q.partition("=")
            val = unquote_plus(val)
            fragment_items[key] = val
            if key == "egg":
                from .utils import parse_extras

                name, stripped_extras = pip_shims.shims._strip_extras(val)
                if stripped_extras:
                    extras = tuple(parse_extras(stripped_extras))
            elif key == "subdirectory":
                subdirectory = val
        return attr.evolve(
            self,
            fragment_dict=fragment_items,
            subdirectory=subdirectory,
            fragment=fragment,
            extras=extras,
            name=name,
        )

    def _parse_auth(self):
        # type: () -> URI
        if self._auth:
            username, _, password = self._auth.partition(":")
            username_is_quoted, password_is_quoted = False, False
            quoted_username, quoted_password = "", ""
            if password:
                quoted_password = quote(password)
                password_is_quoted = quoted_password != password
            if username:
                quoted_username = quote(username)
                username_is_quoted = quoted_username != username
            return attr.evolve(
                self,
                username=quoted_username,
                password=quoted_password,
                username_is_quoted=username_is_quoted,
                password_is_quoted=password_is_quoted,
            )
        return self

    def get_password(self, unquote=False, include_token=True):
        # type: (bool, bool) -> str
        password = self.password if self.password else ""
        if password and unquote and self._password_is_quoted:
            password = url_unquote(password)
        return password

    def get_username(self, unquote=False):
        # type: (bool) -> str
        username = self.username if self.username else ""
        if username and unquote and self._username_is_quoted:
            username = url_unquote(username)
        return username

    @staticmethod
    def parse_subdirectory(url_part):
        # type: (str) -> Tuple[str, Optional[str]]
        subdir = None
        if "&subdirectory" in url_part:
            url_part, _, subdir = url_part.rpartition("&")
            if "#egg=" not in url_part:
                subdir = "#{0}".format(subdir.strip())
            else:
                subdir = "&{0}".format(subdir.strip())
        return url_part.strip(), subdir
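
    # Editor's note, illustrative calls (not part of the vendored module):
    #
    #   URI.parse_subdirectory("v1.0&subdirectory=pkgs/core")
    #   # -> ("v1.0", "#subdirectory=pkgs/core")
    #   URI.parse_subdirectory("repo.git#egg=core&subdirectory=pkgs/core")
    #   # -> ("repo.git#egg=core", "&subdirectory=pkgs/core")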

    @classmethod
    def get_parsed_url(cls, url):
        # if there is a "#" in the auth section, this could break url parsing
        parsed_url = _get_parsed_url(url)
        if "@" in url and "#" in url:
            scheme = "{0}://".format(parsed_url.scheme)
            if parsed_url.scheme == "file":
                scheme = "{0}/".format(scheme)
            url_without_scheme = url.replace(scheme, "")
            maybe_auth, _, maybe_url = url_without_scheme.partition("@")
            if "#" in maybe_auth and (not parsed_url.host or "." not in parsed_url.host):
                new_parsed_url = _get_parsed_url("{0}{1}".format(scheme, maybe_url))
                new_parsed_url = new_parsed_url._replace(auth=maybe_auth)
                return new_parsed_url
        return parsed_url

    @classmethod
    def parse(cls, url):
        # type: (S) -> URI
        from .utils import DIRECT_URL_RE, split_ref_from_uri

        is_direct_url = False
        name_with_extras = None
        is_implicit_ssh = url.strip().startswith("git+git@")
        if is_implicit_ssh:
            from ..utils import add_ssh_scheme_to_git_uri

            url = add_ssh_scheme_to_git_uri(url)
        direct_match = DIRECT_URL_RE.match(url)
        if direct_match is not None:
            is_direct_url = True
            name_with_extras, _, url = url.partition("@")
            name_with_extras = name_with_extras.strip()
        url, ref = split_ref_from_uri(url.strip())
        if "file:/" in url and "file:///" not in url:
            url = url.replace("file:/", "file:///")
        parsed = cls.get_parsed_url(url)
        # if there is a "#" in the auth section, this could break url parsing
        if not (parsed.scheme and parsed.host):
            # check if this is a file uri
            if not (
                parsed.scheme
                and parsed.path
                and (parsed.scheme == "file" or parsed.scheme.endswith("+file"))
            ):
                raise ValueError("Failed parsing URL {0!r} - Not a valid url".format(url))
        parsed_dict = dict(parsed._asdict()).copy()
        parsed_dict["is_direct_url"] = is_direct_url
        parsed_dict["is_implicit_ssh"] = is_implicit_ssh
        parsed_dict.update(
            **update_url_name_and_fragment(name_with_extras, ref, parsed_dict)
        )  # type: ignore
        return cls(**parsed_dict)._parse_auth()._parse_query()._parse_fragment()
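
    # Editor's note, an illustrative parse (not part of the vendored module);
    # the values shown are what the field-by-field parsing above should produce:
    #
    #   uri = URI.parse("https://user:s3cret@pypi.example.com:8080/simple")
    #   # uri.scheme == "https", uri.host == "pypi.example.com", uri.port == 8080
    #   # uri.path == "/simple", uri.username == "user", uri.password == "s3cret"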
    def to_string(
        self,
        escape_password=True,  # type: bool
        unquote=True,  # type: bool
        direct=None,  # type: Optional[bool]
        strip_ssh=False,  # type: bool
        strip_ref=False,  # type: bool
        strip_name=False,  # type: bool
        strip_subdir=False,  # type: bool
    ):
        # type: (...) -> str
        """Converts the current URI to a string, unquoting or escaping the
        password as needed.

        :param escape_password: Whether to replace password with ``----``, default True
        :type escape_password: bool, optional
        :param unquote: Whether to unquote url-escapes in the password, default True
        :type unquote: bool, optional
        :param bool direct: Whether to format as a direct URL
        :param bool strip_ssh: Whether to strip the SSH scheme from the url (git only)
        :param bool strip_ref: Whether to drop the VCS ref (if present)
        :param bool strip_name: Whether to drop the name and extras (if present)
        :param bool strip_subdir: Whether to drop the subdirectory (if present)
        :return: The reconstructed string representing the URI
        :rtype: str
        """

        if direct is None:
            direct = self.is_direct_url
        if escape_password:
            password = "----" if self.password else ""
            if password:
                username = self.get_username(unquote=unquote)
            elif self.username:
                username = "----"
            else:
                username = ""
        else:
            password = self.get_password(unquote=unquote)
            username = self.get_username(unquote=unquote)
        auth = ""
        if username:
            if password:
                auth = "{username}:{password}@".format(
                    password=password, username=username
                )
            else:
                auth = "{username}@".format(username=username)
        query = ""
        if self.query:
            query = "{query}?{self.query}".format(query=query, self=self)
        subdir_prefix = "#"
        if not direct:
            if self.name and not strip_name:
                fragment = "#egg={self.name_with_extras}".format(self=self)
                subdir_prefix = "&"
            elif not strip_name and (
                self.extras and self.scheme and self.scheme.startswith("file")
            ):
                from .utils import extras_to_string

                fragment = extras_to_string(self.extras)
            else:
                fragment = ""
            query = "{query}{fragment}".format(query=query, fragment=fragment)
        if self.subdirectory and not strip_subdir:
            query = "{query}{subdir_prefix}subdirectory={self.subdirectory}".format(
                query=query, subdir_prefix=subdir_prefix, self=self
            )
        host_port_path = self.get_host_port_path(strip_ref=strip_ref)
        url = "{self.scheme}://{auth}{host_port_path}{query}".format(
            self=self, auth=auth, host_port_path=host_port_path, query=query
        )
        if strip_ssh:
            from ..utils import strip_ssh_from_git_uri

            url = strip_ssh_from_git_uri(url)
        if self.name and direct and not strip_name:
            return "{self.name_with_extras}@ {url}".format(self=self, url=url)
        return url
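
    # Editor's note, illustrative output (not part of the vendored module),
    # using the example URI parsed above:
    #
    #   uri.to_string()
    #   # -> "https://user:----@pypi.example.com:8080/simple"
    #   uri.to_string(escape_password=False)
    #   # -> "https://user:s3cret@pypi.example.com:8080/simple"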
    def get_host_port_path(self, strip_ref=False):
        # type: (bool) -> str
        host = self.host if self.host else ""
        if self.port is not None:
            host = "{host}:{self.port!s}".format(host=host, self=self)
        path = "{self.path}".format(self=self) if self.path else ""
        if self.ref and not strip_ref:
            path = "{path}@{self.ref}".format(path=path, self=self)
        return "{host}{path}".format(host=host, path=path)

    @property
    def hidden_auth(self):
        # type: () -> str
        auth = ""
        if self.username and self.password:
            password = "****"
            username = self.get_username(unquote=True)
            auth = "{username}:{password}".format(username=username, password=password)
        elif self.username and not self.password:
            auth = "****"
        return auth

    @property
    def name_with_extras(self):
        # type: () -> str
        from .utils import extras_to_string

        if not self.name:
            return ""
        extras = extras_to_string(self.extras)
        return "{self.name}{extras}".format(self=self, extras=extras)

    @property
    def as_link(self):
        # type: () -> Link
        link = pip_shims.shims.Link(
            self.to_string(escape_password=False, strip_ssh=False, direct=False)
        )
        return link

    @property
    def bare_url(self):
        # type: () -> str
        return self.to_string(
            escape_password=False,
            strip_ssh=self.is_implicit_ssh,
            direct=False,
            strip_name=True,
            strip_ref=True,
            strip_subdir=True,
        )

    @property
    def url_without_fragment_or_ref(self):
        # type: () -> str
        return self.to_string(
            escape_password=False,
            strip_ssh=self.is_implicit_ssh,
            direct=False,
            strip_name=True,
            strip_ref=True,
        )

    @property
    def url_without_fragment(self):
        # type: () -> str
        return self.to_string(
            escape_password=False,
            strip_ssh=self.is_implicit_ssh,
            direct=False,
            strip_name=True,
        )

    @property
    def url_without_ref(self):
        # type: () -> str
        return self.to_string(
            escape_password=False,
            strip_ssh=self.is_implicit_ssh,
            direct=False,
            strip_ref=True,
        )

    @property
    def base_url(self):
        # type: () -> str
        return self.to_string(
            escape_password=False,
            strip_ssh=self.is_implicit_ssh,
            direct=False,
            unquote=False,
        )

    @property
    def full_url(self):
        # type: () -> str
        return self.to_string(escape_password=False, strip_ssh=False, direct=False)

    @property
    def secret(self):
        # type: () -> str
        return self.full_url

    @property
    def safe_string(self):
        # type: () -> str
        return self.to_string(escape_password=True, unquote=True)

    @property
    def unsafe_string(self):
        # type: () -> str
        return self.to_string(escape_password=False, unquote=True)

    @property
    def uri_escape(self):
        # type: () -> str
        return self.to_string(escape_password=False, unquote=False)

    @property
    def is_installable(self):
        # type: () -> bool
        return self.is_file_url and is_installable_file(self.bare_url)

    @property
    def is_vcs(self):
        # type: () -> bool
        from ..utils import VCS_SCHEMES

        return self.scheme in VCS_SCHEMES

    @property
    def is_file_url(self):
        # type: () -> bool
        return all([self.scheme, self.scheme == "file"])

    def __str__(self):
        # type: () -> str
        return self.to_string(escape_password=True, unquote=True)
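
# Editor's note, illustrative property behavior (not part of the vendored
# module), again using the example URI parsed above: ``safe_string`` masks the
# password while ``full_url``/``unsafe_string`` keep it, so logs can safely
# use ``str(uri)``:
#
#   uri.safe_string  # -> "https://user:----@pypi.example.com:8080/simple"
#   uri.full_url     # -> "https://user:s3cret@pypi.example.com:8080/simple"
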
def update_url_name_and_fragment(name_with_extras, ref, parsed_dict):
    # type: (Optional[str], Optional[str], Dict[str, Optional[str]]) -> Dict[str, Optional[str]]
    if name_with_extras:
        fragment = ""  # type: Optional[str]
        parsed_extras = ()
        name, extras = pip_shims.shims._strip_extras(name_with_extras)
        if extras:
            parsed_extras = parsed_extras + tuple(parse_extras(extras))
        if parsed_dict["fragment"] is not None:
            fragment = "{0}".format(parsed_dict["fragment"])
            if fragment.startswith("egg="):
                _, _, fragment_part = fragment.partition("=")
                fragment_name, fragment_extras = pip_shims.shims._strip_extras(
                    fragment_part
                )
                name = name if name else fragment_name
                if fragment_extras:
                    parsed_extras = parsed_extras + tuple(parse_extras(fragment_extras))
                name_with_extras = "{0}{1}".format(name, extras_to_string(parsed_extras))
        elif (
            parsed_dict.get("path") is not None and "&subdirectory" in parsed_dict["path"]
        ):
            path, fragment = URI.parse_subdirectory(parsed_dict["path"])  # type: ignore
            parsed_dict["path"] = path
        elif ref is not None and "&subdirectory" in ref:
            ref, fragment = URI.parse_subdirectory(ref)
        parsed_dict["name"] = name
        parsed_dict["extras"] = parsed_extras
    if ref:
        parsed_dict["ref"] = ref.strip()
    return parsed_dict
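
# Editor's note, an illustrative call (not part of the vendored module),
# assuming the nesting reconstructed above:
#
#   update_url_name_and_fragment(
#       "requests[security]", "2.1", {"fragment": None, "path": "/r/requests"}
#   )
#   # -> {..., "name": "requests", "extras": ("security",), "ref": "2.1"}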
1037
Lib/site-packages/pipenv/vendor/requirementslib/models/utils.py
vendored
Normal file
File diff suppressed because it is too large
136
Lib/site-packages/pipenv/vendor/requirementslib/models/vcs.py
vendored
Normal file
@@ -0,0 +1,136 @@
# -*- coding=utf-8 -*-
from __future__ import absolute_import, print_function

import importlib
import os
import sys

from pipenv.vendor import attr
import pip_shims
import six

from ..environment import MYPY_RUNNING
from .url import URI

if MYPY_RUNNING:
    from typing import Any, Optional, Tuple


@attr.s(hash=True)
class VCSRepository(object):
    DEFAULT_RUN_ARGS = None

    url = attr.ib()  # type: str
    name = attr.ib()  # type: str
    checkout_directory = attr.ib()  # type: str
    vcs_type = attr.ib()  # type: str
    parsed_url = attr.ib()  # type: URI
    subdirectory = attr.ib(default=None)  # type: Optional[str]
    commit_sha = attr.ib(default=None)  # type: Optional[str]
    ref = attr.ib(default=None)  # type: Optional[str]
    repo_backend = attr.ib()  # type: Any
    clone_log = attr.ib(default=None)  # type: Optional[str]

    @parsed_url.default
    def get_parsed_url(self):
        # type: () -> URI
        return URI.parse(self.url)

    @repo_backend.default
    def get_repo_backend(self):
        if self.DEFAULT_RUN_ARGS is None:
            default_run_args = self.monkeypatch_pip()
        else:
            default_run_args = self.DEFAULT_RUN_ARGS
        from pip_shims.shims import VcsSupport

        VCS_SUPPORT = VcsSupport()
        backend = VCS_SUPPORT.get_backend(self.vcs_type)
        # repo = backend(url=self.url)
        if backend.run_command.__func__.__defaults__ != default_run_args:
            backend.run_command.__func__.__defaults__ = default_run_args
        return backend

    @property
    def is_local(self):
        # type: () -> bool
        url = self.url
        if "+" in url:
            url = url.split("+")[1]
        return url.startswith("file")
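
    # Editor's note, illustrative values (not part of the vendored module):
    # "git+file:///src/project" splits on "+" to "file:///src/project", so
    # is_local is True; "git+https://host/repo.git" yields False.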

    def obtain(self):
        # type: () -> None
        lt_pip_19_2 = (
            pip_shims.parsed_pip_version.parsed_version < pip_shims.parse_version("19.2")
        )
        if lt_pip_19_2:
            self.repo_backend = self.repo_backend(self.url)
        if os.path.exists(
            self.checkout_directory
        ) and not self.repo_backend.is_repository_directory(self.checkout_directory):
            self.repo_backend.unpack(self.checkout_directory)
        elif not os.path.exists(self.checkout_directory):
            if lt_pip_19_2:
                self.repo_backend.obtain(self.checkout_directory)
            else:
                self.repo_backend.obtain(self.checkout_directory, self.parsed_url)
        else:
            if self.ref:
                self.checkout_ref(self.ref)
            if not self.commit_sha:
                self.commit_sha = self.get_commit_hash()

    def checkout_ref(self, ref):
        # type: (str) -> None
        rev_opts = self.repo_backend.make_rev_options(ref)
        if not any(
            [
                self.repo_backend.is_commit_id_equal(self.checkout_directory, ref),
                self.repo_backend.is_commit_id_equal(self.checkout_directory, rev_opts),
                self.is_local,
            ]
        ):
            self.update(ref)

    def update(self, ref):
        # type: (str) -> None
        target_ref = self.repo_backend.make_rev_options(ref)
        if pip_shims.parse_version(pip_shims.pip_version) > pip_shims.parse_version(
            "18.0"
        ):
            self.repo_backend.update(self.checkout_directory, self.url, target_ref)
        else:
            self.repo_backend.update(self.checkout_directory, target_ref)
        self.commit_sha = self.get_commit_hash()

    def get_commit_hash(self, ref=None):
        # type: (Optional[str]) -> str
        with pip_shims.shims.global_tempdir_manager():
            return self.repo_backend.get_revision(self.checkout_directory)

    @classmethod
    def monkeypatch_pip(cls):
        # type: () -> Tuple[Any, ...]
        from pip_shims.compat import get_allowed_args

        target_module = pip_shims.shims.VcsSupport.__module__
        pip_vcs = importlib.import_module(target_module)
        args, kwargs = get_allowed_args(pip_vcs.VersionControl.run_command)
        run_command_defaults = pip_vcs.VersionControl.run_command.__defaults__
        if "show_stdout" not in args and "show_stdout" not in kwargs:
            new_defaults = run_command_defaults
        else:
            # set the default to not write stdout, the first option sets this value
            new_defaults = [False] + list(run_command_defaults)[1:]
            new_defaults = tuple(new_defaults)
        if six.PY3:
            try:
                pip_vcs.VersionControl.run_command.__defaults__ = new_defaults
            except AttributeError:
                pip_vcs.VersionControl.run_command.__func__.__defaults__ = new_defaults
        else:
            pip_vcs.VersionControl.run_command.__func__.__defaults__ = new_defaults
        sys.modules[target_module] = pip_vcs
        cls.DEFAULT_RUN_ARGS = new_defaults
        return new_defaults
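
An illustrative way to drive VCSRepository (an editor's sketch, not from the vendored sources; the URL and paths are invented, and obtain() would really clone, so treat it as a usage sketch rather than a test):

repo = VCSRepository(
    url="https://github.com/example/project.git",
    name="project",
    checkout_directory="/tmp/checkouts/project",
    vcs_type="git",         # resolved via pip's VcsSupport.get_backend()
    ref="main",
)
repo.obtain()               # clone if missing, otherwise check out "main"
print(repo.commit_sha)      # populated from get_commit_hash()
print(repo.is_local)        # False for an https URL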