26
Lib/site-packages/pipenv/patched/piptools/LICENSE
Normal file
@@ -0,0 +1,26 @@
Copyright (c). All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.

3. Neither the name of pip-tools nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
11
Lib/site-packages/pipenv/patched/piptools/__init__.py
Normal file
@@ -0,0 +1,11 @@
import locale

from piptools.click import secho

# Needed for locale.getpreferredencoding(False) to work
# in pip._internal.utils.encoding.auto_decode
try:
    locale.setlocale(locale.LC_ALL, "")
except locale.Error as e:  # pragma: no cover
    # setlocale can apparently crash if locales are uninitialized
    secho("Ignoring error when setting locale: {}".format(e), fg="red")
17
Lib/site-packages/pipenv/patched/piptools/__main__.py
Normal file
@@ -0,0 +1,17 @@
import click

from piptools.scripts import compile, sync


@click.group()
def cli():
    pass


cli.add_command(compile.cli, "compile")
cli.add_command(sync.cli, "sync")


# Enable ``python -m piptools ...``.
if __name__ == "__main__":  # pragma: no branch
    cli()
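
# Illustrative invocations enabled by the entry point above (exact
# subcommand flags depend on the installed pip-tools version):
#   python -m piptools compile requirements.in
#   python -m piptools sync requirements.txt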
Binary file not shown.
@@ -0,0 +1,42 @@
# coding: utf-8
# flake8: noqa
from __future__ import absolute_import, division, print_function, unicode_literals

import six

from .pip_compat import (
    DEV_PKGS,
    FAVORITE_HASH,
    PIP_VERSION,
    FormatControl,
    InstallationCandidate,
    InstallCommand,
    InstallationError,
    InstallRequirement,
    Link,
    PackageFinder,
    PyPI,
    RequirementSet,
    RequirementTracker,
    Resolver,
    SafeFileCache,
    VcsSupport,
    Wheel,
    WheelCache,
    cmdoptions,
    get_installed_distributions,
    install_req_from_editable,
    install_req_from_line,
    parse_requirements,
    path_to_url,
    pip_version,
    stdlib_pkgs,
    url_to_path,
    user_cache_dir,
    normalize_path,
)

if six.PY2:
    from .tempfile import TemporaryDirectory
else:
    from tempfile import TemporaryDirectory
Binary file not shown.
@@ -0,0 +1,18 @@
# Ported from python 3.7 contextlib.py
class nullcontext(object):
    """Context manager that does no additional processing.
    Used as a stand-in for a normal context manager, when a particular
    block of code is only sometimes used with a normal context manager:
    cm = optional_cm if condition else nullcontext()
    with cm:
        # Perform operation, using optional_cm if condition is True
    """

    def __init__(self, enter_result=None):
        self.enter_result = enter_result

    def __enter__(self):
        return self.enter_result

    def __exit__(self, *excinfo):
        pass
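
# Minimal usage sketch (mirrors contextlib.nullcontext semantics):
#   with nullcontext("result") as value:
#       assert value == "result"  # enter_result is passed straight through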
@@ -0,0 +1,77 @@
# -*- coding=utf-8 -*-
from __future__ import absolute_import
import importlib
import os
from appdirs import user_cache_dir
os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip")
import pip_shims.shims
from pip_shims.models import ShimmedPathCollection, ImportTypes

InstallationCandidate = ShimmedPathCollection("InstallationCandidate", ImportTypes.CLASS)
InstallationCandidate.create_path("models.candidate", "18.0", "9999")
InstallationCandidate.create_path("index", "7.0.3", "10.9.9")

PIP_VERSION = tuple(map(int, pip_shims.shims.parsed_pip_version.parsed_version.base_version.split(".")))

RequirementTracker = pip_shims.shims.RequirementTracker

def do_import(module_path, subimport=None, old_path=None):
    old_path = old_path or module_path
    pip_path = os.environ.get("PIP_SHIMS_BASE_MODULE", "pip")
    prefixes = ["{}._internal".format(pip_path), pip_path]
    paths = [module_path, old_path]
    search_order = [
        "{0}.{1}".format(p, pth) for p in prefixes for pth in paths if pth is not None
    ]
    package = subimport if subimport else None
    for to_import in search_order:
        if not subimport:
            to_import, _, package = to_import.rpartition(".")
        try:
            imported = importlib.import_module(to_import)
        except ImportError:
            continue
        else:
            return getattr(imported, package)
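
# Illustrative search order: with PIP_SHIMS_BASE_MODULE set above,
# do_import("utils.misc", "normalize_path") tries, in order,
#   pipenv.patched.notpip._internal.utils.misc
#   pipenv.patched.notpip.utils.misc
# and returns the first "normalize_path" attribute found.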

if PIP_VERSION[:2] <= (20, 0):
    def install_req_from_parsed_requirement(req, **kwargs):
        return req

else:
    from pipenv.patched.notpip._internal.req.constructors import install_req_from_parsed_requirement

InstallRequirement = pip_shims.shims.InstallRequirement
InstallationError = pip_shims.shims.InstallationError
parse_requirements = pip_shims.shims.parse_requirements
RequirementSet = pip_shims.shims.RequirementSet
SafeFileCache = pip_shims.shims.SafeFileCache
FAVORITE_HASH = pip_shims.shims.FAVORITE_HASH
path_to_url = pip_shims.shims.path_to_url
url_to_path = pip_shims.shims.url_to_path
PackageFinder = pip_shims.shims.PackageFinder
FormatControl = pip_shims.shims.FormatControl
InstallCommand = pip_shims.shims.InstallCommand
Wheel = pip_shims.shims.Wheel
cmdoptions = pip_shims.shims.cmdoptions
get_installed_distributions = pip_shims.shims.get_installed_distributions
PyPI = pip_shims.shims.PyPI
stdlib_pkgs = pip_shims.shims.stdlib_pkgs
DEV_PKGS = pip_shims.shims.DEV_PKGS
Link = pip_shims.shims.Link
Session = do_import("_vendor.requests.sessions", "Session")
Resolver = pip_shims.shims.Resolver
VcsSupport = pip_shims.shims.VcsSupport
WheelCache = pip_shims.shims.WheelCache
pip_version = pip_shims.shims.pip_version
normalize_path = do_import("utils.misc", "normalize_path")
install_req_from_line = pip_shims.shims.install_req_from_line
install_req_from_editable = pip_shims.shims.install_req_from_editable

# parse_requirements is redefined below; keep a reference to the shimmed
# parser so the wrapper can delegate to it.
_parse_requirements = parse_requirements


def parse_requirements(
    filename, session, finder=None, options=None, constraint=False, isolated=False
):
    for parsed_req in _parse_requirements(
        filename, session, finder=finder, options=options, constraint=constraint
    ):
        yield install_req_from_parsed_requirement(parsed_req, isolated=isolated)
@@ -0,0 +1,88 @@
# coding: utf-8
from __future__ import absolute_import, division, print_function

import os as _os
import sys as _sys
import warnings as _warnings
from tempfile import mkdtemp


class TemporaryDirectory(object):
    """Create and return a temporary directory. This has the same
    behavior as mkdtemp but can be used as a context manager. For
    example:

        with TemporaryDirectory() as tmpdir:
            ...

    Upon exiting the context, the directory and everything contained
    in it are removed.
    """

    def __init__(self, suffix="", prefix="tmp", dir=None):
        self._closed = False
        self.name = None  # Handle mkdtemp raising an exception
        self.name = mkdtemp(suffix, prefix, dir)

    def __repr__(self):
        return "<{} {!r}>".format(self.__class__.__name__, self.name)

    def __enter__(self):
        return self.name

    def cleanup(self):
        if self.name and not self._closed:
            try:
                self._rmtree(self.name)
            except (TypeError, AttributeError) as ex:
                # Issue #10188: Emit a warning on stderr
                # if the directory could not be cleaned
                # up due to missing globals
                if "None" not in str(ex):
                    raise
                print(
                    "ERROR: {!r} while cleaning up {!r}".format(ex, self),
                    file=_sys.stderr,
                )
                return
            self._closed = True

    def __exit__(self, exc, value, tb):
        self.cleanup()

    def __del__(self):
        # Issue a ResourceWarning if implicit cleanup needed
        self.cleanup()

    # XXX (ncoghlan): The following code attempts to make
    # this class tolerant of the module nulling out process
    # that happens during CPython interpreter shutdown
    # Alas, it doesn't actually manage it. See issue #10188
    _listdir = staticmethod(_os.listdir)
    _path_join = staticmethod(_os.path.join)
    _isdir = staticmethod(_os.path.isdir)
    _islink = staticmethod(_os.path.islink)
    _remove = staticmethod(_os.remove)
    _rmdir = staticmethod(_os.rmdir)
    _warn = _warnings.warn

    def _rmtree(self, path):
        # Essentially a stripped down version of shutil.rmtree. We can't
        # use globals because they may be None'ed out at shutdown.
        for name in self._listdir(path):
            fullname = self._path_join(path, name)
            try:
                isdir = self._isdir(fullname) and not self._islink(fullname)
            except OSError:
                isdir = False
            if isdir:
                self._rmtree(fullname)
            else:
                try:
                    self._remove(fullname)
                except OSError:
                    pass
        try:
            self._rmdir(path)
        except OSError:
            pass
170
Lib/site-packages/pipenv/patched/piptools/cache.py
Normal file
@@ -0,0 +1,170 @@
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals

import json
import os
import platform
import sys

from pipenv.vendor.packaging.requirements import Requirement

from .exceptions import PipToolsError
from .utils import as_tuple, key_from_req, lookup_table

_PEP425_PY_TAGS = {"cpython": "cp", "pypy": "pp", "ironpython": "ip", "jython": "jy"}


def _implementation_name():
    """Similar to PEP 425, except that the minor version is separated from the
    major to differentiate "3.10" and "31.0".
    """
    implementation_name = platform.python_implementation().lower()
    implementation = _PEP425_PY_TAGS.get(implementation_name, "??")
    return "{}{}.{}".format(implementation, *sys.version_info)
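
# Illustrative values: CPython 3.8 -> "cp3.8", PyPy 3.6 -> "pp3.6"; the dot
# keeps "3.10" distinct from "31.0" in the cache file name.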

class CorruptCacheError(PipToolsError):
    def __init__(self, path):
        self.path = path

    def __str__(self):
        lines = [
            "The dependency cache seems to have been corrupted.",
            "Inspect, or delete, the following file:",
            " {}".format(self.path),
        ]
        return os.linesep.join(lines)


def read_cache_file(cache_file_path):
    with open(cache_file_path, "r") as cache_file:
        try:
            doc = json.load(cache_file)
        except ValueError:
            raise CorruptCacheError(cache_file_path)

        # Check version and load the contents
        if doc["__format__"] != 1:
            raise AssertionError("Unknown cache file format")
        return doc["dependencies"]


class DependencyCache(object):
    """
    Creates a new persistent dependency cache for the current Python version.
    The cache file is written to the appropriate user cache dir for the
    current platform, i.e.

        ~/.cache/pip-tools/depcache-pyX.Y.json

    Where py indicates the Python implementation.
    Where X.Y indicates the Python version.
    """

    def __init__(self, cache_dir):
        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)
        cache_filename = "depcache-{}.json".format(_implementation_name())

        self._cache_file = os.path.join(cache_dir, cache_filename)
        self._cache = None

    @property
    def cache(self):
        """
        The dictionary that is the actual in-memory cache. This property
        lazily loads the cache from disk.
        """
        if self._cache is None:
            self.read_cache()
        return self._cache

    def as_cache_key(self, ireq):
        """
        Given a requirement, return its cache key. This behavior is a little weird
        in order to allow backwards compatibility with cache files. For a requirement
        without extras, this will return, for example:

        ("ipython", "2.1.0")

        For a requirement with extras, the extras will be comma-separated and appended
        to the version, inside brackets, like so:

        ("ipython", "2.1.0[nbconvert,notebook]")
        """
        name, version, extras = as_tuple(ireq)
        if not extras:
            extras_string = ""
        else:
            extras_string = "[{}]".format(",".join(extras))
        return name, "{}{}".format(version, extras_string)

    def read_cache(self):
        """Reads the cached contents into memory."""
        if os.path.exists(self._cache_file):
            self._cache = read_cache_file(self._cache_file)
        else:
            self._cache = {}

    def write_cache(self):
        """Writes the cache to disk as JSON."""
        doc = {"__format__": 1, "dependencies": self._cache}
        with open(self._cache_file, "w") as f:
            json.dump(doc, f, sort_keys=True)

    def clear(self):
        self._cache = {}
        self.write_cache()

    def __contains__(self, ireq):
        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
        return pkgversion_and_extras in self.cache.get(pkgname, {})

    def __getitem__(self, ireq):
        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
        return self.cache[pkgname][pkgversion_and_extras]

    def __setitem__(self, ireq, values):
        pkgname, pkgversion_and_extras = self.as_cache_key(ireq)
        self.cache.setdefault(pkgname, {})
        self.cache[pkgname][pkgversion_and_extras] = values
        self.write_cache()

    def reverse_dependencies(self, ireqs):
        """
        Returns a lookup table of reverse dependencies for all the given ireqs.

        Since this is all static, it only works if the dependency cache
        contains the complete data, otherwise you end up with a partial view.
        This is typically no problem if you use this function after the entire
        dependency tree is resolved.
        """
        ireqs_as_cache_values = [self.as_cache_key(ireq) for ireq in ireqs]
        return self._reverse_dependencies(ireqs_as_cache_values)

    def _reverse_dependencies(self, cache_keys):
        """
        Returns a lookup table of reverse dependencies for all the given cache keys.

        Example input:

            [('pep8', '1.5.7'),
             ('flake8', '2.4.0'),
             ('mccabe', '0.3'),
             ('pyflakes', '0.8.1')]

        Example output:

            {'pep8': ['flake8'],
             'flake8': [],
             'mccabe': ['flake8'],
             'pyflakes': ['flake8']}

        """
        # First, collect all the dependencies into a sequence of (parent, child)
        # tuples, like [('flake8', 'pep8'), ('flake8', 'mccabe'), ...]
        return lookup_table(
            (key_from_req(Requirement(dep_name)), name)
            for name, version_and_extras in cache_keys
            for dep_name in self.cache[name][version_and_extras]
        )
6
Lib/site-packages/pipenv/patched/piptools/click.py
Normal file
@@ -0,0 +1,6 @@
from __future__ import absolute_import

import click
from click import *  # noqa

click.disable_unicode_literals_warning = True
60
Lib/site-packages/pipenv/patched/piptools/exceptions.py
Normal file
@@ -0,0 +1,60 @@
class PipToolsError(Exception):
    pass


class NoCandidateFound(PipToolsError):
    def __init__(self, ireq, candidates_tried, finder):
        self.ireq = ireq
        self.candidates_tried = candidates_tried
        self.finder = finder

    def __str__(self):
        versions = []
        pre_versions = []

        for candidate in sorted(self.candidates_tried):
            version = str(candidate.version)
            if candidate.version.is_prerelease:
                pre_versions.append(version)
            else:
                versions.append(version)

        lines = ["Could not find a version that matches {}".format(self.ireq)]

        if versions:
            lines.append("Tried: {}".format(", ".join(versions)))

        if pre_versions:
            if self.finder.allow_all_prereleases:
                line = "Tried"
            else:
                line = "Skipped"

            line += " pre-versions: {}".format(", ".join(pre_versions))
            lines.append(line)

        if versions or pre_versions:
            lines.append(
                "There are incompatible versions in the resolved dependencies:"
            )
            source_ireqs = getattr(self.ireq, "_source_ireqs", [])
            lines.extend(" {}".format(ireq) for ireq in source_ireqs)
        else:
            lines.append("No versions found")
            lines.append(
                "{} {} reachable?".format(
                    "Were" if len(self.finder.index_urls) > 1 else "Was",
                    " or ".join(self.finder.index_urls),
                )
            )
        return "\n".join(lines)
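
# Illustrative rendering when the index returned no versions at all
# (index URL taken from finder.index_urls):
#   Could not find a version that matches foo==9.9
#   No versions found
#   Was https://pypi.org/simple reachable?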

class IncompatibleRequirements(PipToolsError):
    def __init__(self, ireq_a, ireq_b):
        self.ireq_a = ireq_a
        self.ireq_b = ireq_b

    def __str__(self):
        message = "Incompatible requirements found: {} and {}"
        return message.format(self.ireq_a, self.ireq_b)
28
Lib/site-packages/pipenv/patched/piptools/locations.py
Normal file
@@ -0,0 +1,28 @@
import os
from shutil import rmtree

from ._compat import user_cache_dir

from .click import secho

# The user_cache_dir helper comes straight from pipenv.patched.notpip itself
try:
    from pipenv.environments import PIPENV_CACHE_DIR as CACHE_DIR
except ImportError:
    CACHE_DIR = user_cache_dir("pipenv")

# NOTE
# We used to store the cache dir under ~/.pip-tools, which is not the
# preferred place to store caches for any platform. This has been addressed
# in pip-tools==1.0.5, but to be good citizens, we point this out explicitly
# to the user when this directory is still found.
LEGACY_CACHE_DIR = os.path.expanduser("~/.pip-tools")

if os.path.exists(LEGACY_CACHE_DIR):
    secho(
        "Removing old cache dir {} (new cache dir is {})".format(
            LEGACY_CACHE_DIR, CACHE_DIR
        ),
        fg="yellow",
    )
    rmtree(LEGACY_CACHE_DIR)
38
Lib/site-packages/pipenv/patched/piptools/logging.py
Normal file
@@ -0,0 +1,38 @@
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals

import logging

from . import click

# Initialise the builtin logging module for other components using it.
# Ex: pip
logging.basicConfig()


class LogContext(object):
    def __init__(self, verbosity=0):
        self.verbosity = verbosity

    def log(self, *args, **kwargs):
        kwargs.setdefault("err", True)
        click.secho(*args, **kwargs)

    def debug(self, *args, **kwargs):
        if self.verbosity >= 1:
            self.log(*args, **kwargs)

    def info(self, *args, **kwargs):
        if self.verbosity >= 0:
            self.log(*args, **kwargs)

    def warning(self, *args, **kwargs):
        kwargs.setdefault("fg", "yellow")
        self.log(*args, **kwargs)

    def error(self, *args, **kwargs):
        kwargs.setdefault("fg", "red")
        self.log(*args, **kwargs)


log = LogContext()
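
# Illustrative behavior: log.debug(...) only prints when verbosity >= 1 and
# log.info(...) when verbosity >= 0; warnings and errors default to yellow and
# red respectively, and all output goes to stderr via err=True.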
@@ -0,0 +1,3 @@
# flake8: noqa
from .local import LocalRequirementsRepository
from .pypi import PyPIRepository
Binary file not shown.
@@ -0,0 +1,46 @@
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals

from abc import ABCMeta, abstractmethod
from contextlib import contextmanager

from six import add_metaclass


@add_metaclass(ABCMeta)
class BaseRepository(object):
    def clear_caches(self):
        """Should clear any caches used by the implementation."""

    def freshen_build_caches(self):
        """Should start with fresh build/source caches."""

    @abstractmethod
    def find_best_match(self, ireq):
        """
        Return a Version object that indicates the best match for the given
        InstallRequirement according to the repository.
        """

    @abstractmethod
    def get_dependencies(self, ireq):
        """
        Given a pinned, URL, or editable InstallRequirement, returns a set of
        dependencies (also InstallRequirements, but not necessarily pinned).
        They indicate the secondary dependencies for the given requirement.
        """

    @abstractmethod
    def get_hashes(self, ireq):
        """
        Given a pinned InstallRequirement, returns a set of hashes that represent
        all of the files for a given requirement. It is not acceptable for an
        editable or unpinned requirement to be passed to this function.
        """

    @abstractmethod
    @contextmanager
    def allow_all_wheels(self):
        """
        Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.
        """
@@ -0,0 +1,93 @@
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals

from contextlib import contextmanager

from .._compat import PIP_VERSION, FAVORITE_HASH
from .base import BaseRepository

from piptools.utils import as_tuple, key_from_ireq, make_install_requirement


def ireq_satisfied_by_existing_pin(ireq, existing_pin):
    """
    Return True if the given InstallRequirement is satisfied by the
    previously encountered version pin.
    """
    version = next(iter(existing_pin.req.specifier)).version
    return ireq.req.specifier.contains(
        version, prereleases=existing_pin.req.specifier.prereleases
    )
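
# Illustrative: a previously pinned "requests==2.23.0" satisfies an incoming
# constraint "requests>=2.20", so the existing pin is reused instead of
# querying the index for a newer match.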

class LocalRequirementsRepository(BaseRepository):
    """
    The LocalRequirementsRepository proxies the _real_ repository by first
    checking if a requirement can be satisfied by existing pins (i.e. the
    result of a previous compile step).

    In effect, if a requirement can be satisfied with a version pinned in the
    requirements file, we prefer that version over the best match found in
    PyPI. This keeps updates to the requirements.txt down to a minimum.
    """

    def __init__(self, existing_pins, proxied_repository):
        self.repository = proxied_repository
        self.existing_pins = existing_pins

    @property
    def options(self):
        return self.repository.options

    @property
    def finder(self):
        return self.repository.finder

    @property
    def session(self):
        return self.repository.session

    @property
    def DEFAULT_INDEX_URL(self):
        return self.repository.DEFAULT_INDEX_URL

    def clear_caches(self):
        self.repository.clear_caches()

    def freshen_build_caches(self):
        self.repository.freshen_build_caches()

    def find_best_match(self, ireq, prereleases=None):
        key = key_from_ireq(ireq)
        existing_pin = self.existing_pins.get(key)
        if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin):
            project, version, _ = as_tuple(existing_pin)
            return make_install_requirement(
                project, version, ireq.extras, ireq.markers,
                constraint=ireq.constraint
            )
        else:
            return self.repository.find_best_match(ireq, prereleases)

    def get_dependencies(self, ireq):
        return self.repository.get_dependencies(ireq)

    def get_hashes(self, ireq):
        key = key_from_ireq(ireq)
        existing_pin = self.existing_pins.get(key)
        if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin):
            if PIP_VERSION[:2] <= (20, 0):
                hashes = existing_pin.options.get("hashes", {})
            else:
                hashes = existing_pin.hash_options
            hexdigests = hashes.get(FAVORITE_HASH)
            if hexdigests:
                return {
                    ":".join([FAVORITE_HASH, hexdigest]) for hexdigest in hexdigests
                }
        return self.repository.get_hashes(ireq)

    @contextmanager
    def allow_all_wheels(self):
        with self.repository.allow_all_wheels():
            yield
487
Lib/site-packages/pipenv/patched/piptools/repositories/pypi.py
Normal file
@@ -0,0 +1,487 @@
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals

import collections
import copy
import hashlib
import os
from contextlib import contextmanager
from shutil import rmtree

from pip_shims.shims import (
    TempDirectory,
    global_tempdir_manager,
    get_requirement_tracker,
    InstallCommand
)
from packaging.requirements import Requirement
from packaging.specifiers import Specifier, SpecifierSet

from .._compat import (
    FAVORITE_HASH,
    PIP_VERSION,
    InstallationError,
    InstallRequirement,
    Link,
    normalize_path,
    PyPI,
    RequirementSet,
    RequirementTracker,
    SafeFileCache,
    TemporaryDirectory,
    VcsSupport,
    Wheel,
    WheelCache,
    contextlib,
    path_to_url,
    pip_version,
    url_to_path,
)
from ..locations import CACHE_DIR
from ..click import progressbar
from ..exceptions import NoCandidateFound
from ..logging import log
from ..utils import (
    dedup,
    clean_requires_python,
    fs_str,
    is_pinned_requirement,
    is_url_requirement,
    lookup_table,
    make_install_requirement,
)
from .base import BaseRepository

os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip")
FILE_CHUNK_SIZE = 4096
FileStream = collections.namedtuple("FileStream", "stream size")


class HashCache(SafeFileCache):
    """Caches hashes of PyPI artifacts so we do not need to re-download them.

    Hashes are only cached when the URL appears to contain a hash in it, and
    the cache key includes the hash value returned from the server. This ought
    to avoid issues where the location on the server changes.
    """
    def __init__(self, *args, **kwargs):
        session = kwargs.pop('session')
        self.session = session
        kwargs.setdefault('directory', os.path.join(CACHE_DIR, 'hash-cache'))
        super(HashCache, self).__init__(*args, **kwargs)

    def get_hash(self, location):
        # If there is no location hash (i.e., md5 / sha256 / etc), we don't want to store it.
        hash_value = None
        vcs = VcsSupport()
        orig_scheme = location.scheme
        new_location = copy.deepcopy(location)
        if orig_scheme in vcs.all_schemes:
            new_location.url = new_location.url.split("+", 1)[-1]
        can_hash = new_location.hash
        if can_hash:
            # hash url WITH fragment
            hash_value = self.get(new_location.url)
        if not hash_value:
            hash_value = self._get_file_hash(new_location) if not new_location.url.startswith("ssh") else None
            hash_value = hash_value.encode('utf8') if hash_value else None
        if can_hash:
            self.set(new_location.url, hash_value)
        return hash_value.decode('utf8') if hash_value else None

    def _get_file_hash(self, location):
        h = hashlib.new(FAVORITE_HASH)
        with open_local_or_remote_file(location, self.session) as (fp, size):
            for chunk in iter(lambda: fp.read(8096), b""):
                h.update(chunk)
        return ":".join([FAVORITE_HASH, h.hexdigest()])
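
# Illustrative cache entry (hash names follow FAVORITE_HASH, typically sha256):
#   "https://.../pkg-1.0.tar.gz#sha256=<fragment>" -> "sha256:<hexdigest>"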


class PyPIRepository(BaseRepository):
    DEFAULT_INDEX_URL = PyPI.simple_url

    """
    The PyPIRepository will use the provided Finder instance to look up
    packages. Typically, it looks up packages on PyPI (the default implicit
    config), but any other PyPI mirror can be used if index_urls is
    changed/configured on the Finder.
    """

    def __init__(self, pip_args, cache_dir=CACHE_DIR, session=None, build_isolation=False, use_json=False):
        self.build_isolation = build_isolation
        self.use_json = use_json
        self.cache_dir = cache_dir

        # Use pip's parser for pip.conf management and defaults.
        # General options (find_links, index_url, extra_index_url, trusted_host,
        # and pre) are deferred to pip.
        self.command = InstallCommand()
        self.options, _ = self.command.parse_args(pip_args)
        if self.build_isolation is not None:
            self.options.build_isolation = build_isolation
        if self.options.cache_dir:
            self.options.cache_dir = normalize_path(self.options.cache_dir)

        self.options.require_hashes = False
        self.options.ignore_dependencies = False

        if session is None:
            session = self.command._build_session(self.options)
        self.session = session
        self.finder = self.command._build_package_finder(
            options=self.options, session=self.session, ignore_requires_python=True
        )

        # Caches
        # stores project_name => InstallationCandidate mappings for all
        # versions reported by PyPI, so we only have to ask once for each
        # project
        self._available_candidates_cache = {}

        # stores InstallRequirement => list(InstallRequirement) mappings
        # of all secondary dependencies for the given requirement, so we
        # only have to go to disk once for each requirement
        self._dependencies_cache = {}
        self._json_dep_cache = {}

        # stores *full* path + fragment => sha256
        self._hash_cache = HashCache(session=session)

        # Setup file paths
        self.freshen_build_caches()
        self._cache_dir = normalize_path(cache_dir)
        self._download_dir = fs_str(os.path.join(self._cache_dir, "pkgs"))
        self._wheel_download_dir = fs_str(os.path.join(self._cache_dir, "wheels"))

    def freshen_build_caches(self):
        """
        Start with fresh build/source caches. Will remove any old build
        caches from disk automatically.
        """
        self._build_dir = TemporaryDirectory(fs_str("build"))
        self._source_dir = TemporaryDirectory(fs_str("source"))

    @property
    def build_dir(self):
        return self._build_dir.name

    @property
    def source_dir(self):
        return self._source_dir.name

    def clear_caches(self):
        rmtree(self._download_dir, ignore_errors=True)
        rmtree(self._wheel_download_dir, ignore_errors=True)

    def find_all_candidates(self, req_name):
        if req_name not in self._available_candidates_cache:
            candidates = self.finder.find_all_candidates(req_name)
            self._available_candidates_cache[req_name] = candidates
        return self._available_candidates_cache[req_name]

    def find_best_match(self, ireq, prereleases=None):
        """
        Returns a Version object that indicates the best match for the given
        InstallRequirement according to the external repository.
        """
        if ireq.editable or is_url_requirement(ireq):
            return ireq  # return itself as the best match

        all_candidates = clean_requires_python(self.find_all_candidates(ireq.name))
        candidates_by_version = lookup_table(
            all_candidates, key=lambda c: c.version, unique=True
        )
        try:
            matching_versions = ireq.specifier.filter(
                (candidate.version for candidate in all_candidates),
                prereleases=prereleases
            )
        except TypeError:
            matching_versions = [candidate.version for candidate in all_candidates]

        # Reuses pip's internal candidate sort key to sort
        matching_candidates = [candidates_by_version[ver] for ver in matching_versions]
        if not matching_candidates:
            raise NoCandidateFound(ireq, all_candidates, self.finder)

        evaluator = self.finder.make_candidate_evaluator(ireq.name)
        best_candidate_result = evaluator.compute_best_candidate(matching_candidates)
        best_candidate = best_candidate_result.best_candidate

        # Turn the candidate into a pinned InstallRequirement
        return make_install_requirement(
            best_candidate.name,
            best_candidate.version,
            ireq.extras,
            ireq.markers,
            constraint=ireq.constraint,
        )

    def get_dependencies(self, ireq):
        json_results = set()

        if self.use_json:
            try:
                json_results = self.get_json_dependencies(ireq)
            except TypeError:
                json_results = set()

        legacy_results = self.get_legacy_dependencies(ireq)
        json_results.update(legacy_results)

        return json_results

    def get_json_dependencies(self, ireq):

        if not (is_pinned_requirement(ireq)):
            raise TypeError('Expected pinned InstallRequirement, got {}'.format(ireq))

        def gen(ireq):
            if self.DEFAULT_INDEX_URL not in self.finder.index_urls:
                return

            url = 'https://pypi.org/pypi/{0}/json'.format(ireq.req.name)
            releases = self.session.get(url).json()['releases']

            matches = [
                r for r in releases
                if '=={0}'.format(r) == str(ireq.req.specifier)
            ]
            if not matches:
                return

            release_requires = self.session.get(
                'https://pypi.org/pypi/{0}/{1}/json'.format(
                    ireq.req.name, matches[0],
                ),
            ).json()
            try:
                requires_dist = release_requires['info']['requires_dist']
            except KeyError:
                return

            for requires in requires_dist:
                i = InstallRequirement.from_line(requires)
                if 'extra' not in repr(i.markers):
                    yield i

        try:
            if ireq not in self._json_dep_cache:
                self._json_dep_cache[ireq] = [g for g in gen(ireq)]

            return set(self._json_dep_cache[ireq])
        except Exception:
            return set()

    def resolve_reqs(self, download_dir, ireq, wheel_cache):
        with get_requirement_tracker() as req_tracker, TempDirectory(
            kind="resolver"
        ) as temp_dir:
            preparer = self.command.make_requirement_preparer(
                temp_build_dir=temp_dir,
                options=self.options,
                req_tracker=req_tracker,
                session=self.session,
                finder=self.finder,
                use_user_site=False,
                download_dir=download_dir,
                wheel_download_dir=self._wheel_download_dir,
            )

            reqset = RequirementSet()
            ireq.is_direct = True
            reqset.add_requirement(ireq)

            resolver = self.command.make_resolver(
                preparer=preparer,
                finder=self.finder,
                options=self.options,
                wheel_cache=wheel_cache,
                use_user_site=False,
                ignore_installed=True,
                ignore_requires_python=True,
                force_reinstall=False,
                upgrade_strategy="to-satisfy-only",
            )
            results = resolver._resolve_one(reqset, ireq)

            if PIP_VERSION[:2] <= (20, 0):
                reqset.cleanup_files()
        results = set(results) if results else set()

        return results, ireq

    def get_legacy_dependencies(self, ireq):
        """
        Given a pinned, URL, or editable InstallRequirement, returns a set of
        dependencies (also InstallRequirements, but not necessarily pinned).
        They indicate the secondary dependencies for the given requirement.
        """
        if not (
            ireq.editable or is_url_requirement(ireq) or is_pinned_requirement(ireq)
        ):
            raise TypeError(
                "Expected url, pinned or editable InstallRequirement, got {}".format(
                    ireq
                )
            )

        if ireq not in self._dependencies_cache:
            if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)):
                # No download_dir for locally available editable requirements.
                # If a download_dir is passed, pip will unnecessarily
                # archive the entire source directory
                download_dir = None
            elif ireq.link and ireq.link.is_vcs:
                # No download_dir for VCS sources. This also works around pip
                # using git-checkout-index, which gets rid of the .git dir.
                download_dir = None
            else:
                download_dir = self._download_dir
                if not os.path.isdir(download_dir):
                    os.makedirs(download_dir)
            if not os.path.isdir(self._wheel_download_dir):
                os.makedirs(self._wheel_download_dir)

            with global_tempdir_manager():
                wheel_cache = WheelCache(self._cache_dir, self.options.format_control)
                prev_tracker = os.environ.get("PIP_REQ_TRACKER")
                try:
                    results, ireq = self.resolve_reqs(
                        download_dir, ireq, wheel_cache
                    )
                    self._dependencies_cache[ireq] = results
                finally:
                    if "PIP_REQ_TRACKER" in os.environ:
                        if prev_tracker:
                            os.environ["PIP_REQ_TRACKER"] = prev_tracker
                        else:
                            del os.environ["PIP_REQ_TRACKER"]

                    if PIP_VERSION[:2] <= (20, 0):
                        wheel_cache.cleanup()

        return self._dependencies_cache[ireq]

    def get_hashes(self, ireq):
        """
        Given an InstallRequirement, return a set of hashes that represent all
        of the files for a given requirement. Unhashable requirements return an
        empty set. Unpinned requirements raise a TypeError.
        """

        if ireq.link:
            link = ireq.link

            if link.is_vcs or (link.is_file and link.is_existing_dir()):
                # Return empty set for unhashable requirements.
                # Unhashable logic modeled on pip's
                # RequirementPreparer.prepare_linked_requirement
                return set()

            if is_url_requirement(ireq):
                # Directly hash URL requirements.
                # URL requirements may have been previously downloaded and cached
                # locally by self.resolve_reqs()
                cached_path = os.path.join(self._download_dir, link.filename)
                if os.path.exists(cached_path):
                    cached_link = Link(path_to_url(cached_path))
                else:
                    cached_link = link
                return {self._hash_cache._get_file_hash(cached_link)}

        if not is_pinned_requirement(ireq):
            raise TypeError("Expected pinned requirement, got {}".format(ireq))

        # We need to get all of the candidates that match our current version
        # pin, these will represent all of the files that could possibly
        # satisfy this constraint.

        result = {}
        with self.allow_all_links():
            matching_candidates = (
                c for c in clean_requires_python(self.find_all_candidates(ireq.name))
                if c.version in ireq.specifier
            )
            log.debug(" {}".format(ireq.name))
            result = {
                h for h in
                map(lambda c: self._hash_cache.get_hash(c.link), matching_candidates)
                if h is not None
            }
        return result

    @contextmanager
    def allow_all_links(self):
        try:
            self.finder._ignore_compatibility = True
            yield
        finally:
            self.finder._ignore_compatibility = False

    @contextmanager
    def allow_all_wheels(self):
        """
        Monkey patches pip.Wheel to allow wheels from all platforms and Python versions.

        This also saves the candidate cache and sets a new one, or else the results from
        the previous non-patched calls will interfere.
        """

        def _wheel_supported(self, tags=None):
            # Ignore current platform. Support everything.
            return True

        def _wheel_support_index_min(self, tags=None):
            # All wheels are equal priority for sorting.
            return 0

        original_wheel_supported = Wheel.supported
        original_support_index_min = Wheel.support_index_min
        original_cache = self._available_candidates_cache

        Wheel.supported = _wheel_supported
        Wheel.support_index_min = _wheel_support_index_min
        self._available_candidates_cache = {}

        try:
            yield
        finally:
            Wheel.supported = original_wheel_supported
            Wheel.support_index_min = original_support_index_min
            self._available_candidates_cache = original_cache


@contextmanager
def open_local_or_remote_file(link, session):
    """
    Open local or remote file for reading.

    :type link: pip.index.Link
    :type session: requests.Session
    :raises ValueError: If link points to a local directory.
    :return: a context manager to a FileStream with the opened file-like object
    """
    url = link.url_without_fragment

    if link.is_file:
        # Local URL
        local_path = url_to_path(url)
        if os.path.isdir(local_path):
            raise ValueError("Cannot open directory for read: {}".format(url))
        else:
            st = os.stat(local_path)
            with open(local_path, "rb") as local_file:
                yield FileStream(stream=local_file, size=st.st_size)
    else:
        # Remote URL
        headers = {"Accept-Encoding": "identity"}
        response = session.get(url, headers=headers, stream=True)

        # Content length must be int or None
        try:
            content_length = int(response.headers["content-length"])
        except (ValueError, KeyError, TypeError):
            content_length = None

        try:
            yield FileStream(stream=response.raw, size=content_length)
        finally:
            response.close()
404
Lib/site-packages/pipenv/patched/piptools/resolver.py
Normal file
@@ -0,0 +1,404 @@
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals

import copy
import os
from functools import partial
from itertools import chain, count

from pip_shims.shims import install_req_from_line
from pipenv.vendor.requirementslib.models.markers import normalize_marker_str
from packaging.markers import Marker

from . import click
from .logging import log
from .utils import (
    UNSAFE_PACKAGES,
    format_requirement,
    format_specifier,
    full_groupby,
    is_pinned_requirement,
    is_url_requirement,
    key_from_ireq,
)

green = partial(click.style, fg="green")
magenta = partial(click.style, fg="magenta")


class RequirementSummary(object):
    """
    Summary of a requirement's properties for comparison purposes.
    """

    def __init__(self, ireq):
        self.req = ireq.req
        self.key = key_from_ireq(ireq)
        self.extras = str(sorted(ireq.extras))
        self.markers = ireq.markers
        self.specifier = str(ireq.specifier)

    def __eq__(self, other):
        return str(self) == str(other)

    def __hash__(self):
        return hash(str(self))

    def __str__(self):
        return repr([self.key, self.specifier, self.extras])


def combine_install_requirements(ireqs):
    """
    Return a single install requirement that reflects a combination of
    all the inputs.
    """
    # We will store the source ireqs in a _source_ireqs attribute;
    # if any of the inputs have this, then use those sources directly.
    source_ireqs = []
    for ireq in ireqs:
        source_ireqs.extend(getattr(ireq, "_source_ireqs", [ireq]))

    # deepcopy the accumulator so as to not modify the inputs
    combined_ireq = copy.deepcopy(source_ireqs[0])
    for ireq in source_ireqs[1:]:
        # NOTE we may be losing some info on dropped reqs here
        if combined_ireq.req is not None and ireq.req is not None:
            combined_ireq.req.specifier &= ireq.req.specifier
        combined_ireq.constraint &= ireq.constraint
        if ireq.markers and not combined_ireq.markers:
            combined_ireq.markers = copy.deepcopy(ireq.markers)
        elif ireq.markers and combined_ireq.markers:
            _markers = []  # type: List[Marker]
            for marker in [ireq.markers, combined_ireq.markers]:
                if isinstance(marker, str):
                    _markers.append(Marker(marker))
                else:
                    _markers.append(marker)
            marker_str = " and ".join([normalize_marker_str(m) for m in _markers if m])
            combined_ireq.markers = Marker(marker_str)
        # Return a sorted, de-duped tuple of extras
        combined_ireq.extras = tuple(
            sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras)))
        )

    # InstallRequirements objects are assumed to come from only one source, and
    # so they support only a single comes_from entry. This function breaks this
    # model. As a workaround, we deterministically choose a single source for
    # the comes_from entry, and add an extra _source_ireqs attribute to keep
    # track of multiple sources for use within pip-tools.
    if len(source_ireqs) > 1:
        if any(ireq.comes_from is None for ireq in source_ireqs):
            # None indicates package was directly specified.
            combined_ireq.comes_from = None
        else:
            # Populate the comes_from field from one of the sources.
            # Requirement input order is not stable, so we need to sort:
            # We choose the shortest entry in order to keep the printed
            # representation as concise as possible.
            combined_ireq.comes_from = min(
                (ireq.comes_from for ireq in source_ireqs),
                key=lambda x: (len(str(x)), str(x)),
            )
    combined_ireq._source_ireqs = source_ireqs
    return combined_ireq
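
# Illustrative combination: ireqs for "Django<1.9" and "django>=1.4.2" fold
# into one ireq with specifier "<1.9,>=1.4.2"; markers are AND-ed together and
# extras are merged into a sorted, de-duplicated tuple.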
|
||||
|
||||
class Resolver(object):
|
||||
def __init__(
|
||||
self,
|
||||
constraints,
|
||||
repository,
|
||||
cache,
|
||||
prereleases=False,
|
||||
clear_caches=False,
|
||||
allow_unsafe=False,
|
||||
):
|
||||
"""
|
||||
This class resolves a given set of constraints (a collection of
|
||||
InstallRequirement objects) by consulting the given Repository and the
|
||||
DependencyCache.
|
||||
"""
|
||||
self.our_constraints = set(constraints)
|
||||
self.their_constraints = set()
|
||||
self.repository = repository
|
||||
self.dependency_cache = cache
|
||||
self.prereleases = prereleases
|
||||
self.clear_caches = clear_caches
|
||||
self.allow_unsafe = allow_unsafe
|
||||
self.unsafe_constraints = set()
|
||||
|
||||
@property
|
||||
def constraints(self):
|
||||
return set(
|
||||
self._group_constraints(
|
||||
chain(
|
||||
sorted(self.our_constraints, key=str),
|
||||
sorted(self.their_constraints, key=str),
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
def resolve_hashes(self, ireqs):
|
||||
"""
|
||||
Finds acceptable hashes for all of the given InstallRequirements.
|
||||
"""
|
||||
log.debug("")
|
||||
log.debug("Generating hashes:")
|
||||
with self.repository.allow_all_wheels():
|
||||
return {ireq: self.repository.get_hashes(ireq) for ireq in ireqs}
|
||||
|
||||
def resolve(self, max_rounds=10):
|
||||
"""
|
||||
Finds concrete package versions for all the given InstallRequirements
|
||||
and their recursive dependencies. The end result is a flat list of
|
||||
(name, version) tuples. (Or an editable package.)
|
||||
|
||||
Resolves constraints one round at a time, until they don't change
|
||||
anymore. Protects against infinite loops by breaking out after a max
|
||||
number rounds.
|
||||
"""
|
||||
if self.clear_caches:
|
||||
self.dependency_cache.clear()
|
||||
self.repository.clear_caches()
|
||||
|
||||
# Ignore existing packages
|
||||
os.environ[str("PIP_EXISTS_ACTION")] = str(
|
||||
"i"
|
||||
) # NOTE: str() wrapping necessary for Python 2/3 compat
|
||||
for current_round in count(start=1): # pragma: no branch
|
||||
if current_round > max_rounds:
|
||||
raise RuntimeError(
|
||||
"No stable configuration of concrete packages "
|
||||
"could be found for the given constraints after "
|
||||
"{max_rounds} rounds of resolving.\n"
|
||||
"This is likely a bug.".format(max_rounds=max_rounds)
|
||||
)
|
||||
|
||||
log.debug("")
|
||||
log.debug(magenta("{:^60}".format("ROUND {}".format(current_round))))
|
||||
has_changed, best_matches = self._resolve_one_round()
|
||||
log.debug("-" * 60)
|
||||
log.debug(
|
||||
"Result of round {}: {}".format(
|
||||
current_round, "not stable" if has_changed else "stable, done"
|
||||
)
|
||||
)
|
||||
if not has_changed:
|
||||
break
|
||||
|
||||
# If a package version (foo==2.0) was built in a previous round,
|
||||
# and in this round a different version of foo needs to be built
|
||||
# (i.e. foo==1.0), the directory will exist already, which will
|
||||
# cause a pip build failure. The trick is to start with a new
|
||||
# build cache dir for every round, so this can never happen.
|
||||
self.repository.freshen_build_caches()
|
||||
|
||||
del os.environ["PIP_EXISTS_ACTION"]
|
||||
|
||||
# Only include hard requirements and not pip constraints
|
||||
results = {req for req in best_matches if not req.constraint}
|
||||
|
||||
# Filter out unsafe requirements.
|
||||
self.unsafe_constraints = set()
|
||||
if not self.allow_unsafe:
|
||||
# reverse_dependencies is used to filter out packages that are only
|
||||
# required by unsafe packages. This logic is incomplete, as it would
|
||||
# fail to filter sub-sub-dependencies of unsafe packages. None of the
|
||||
# UNSAFE_PACKAGES currently have any dependencies at all (which makes
|
||||
# sense for installation tools) so this seems sufficient.
|
||||
reverse_dependencies = self.reverse_dependencies(results)
|
||||
for req in results.copy():
|
||||
required_by = reverse_dependencies.get(req.name.lower(), [])
|
||||
if req.name in UNSAFE_PACKAGES or (
|
||||
required_by and all(name in UNSAFE_PACKAGES for name in required_by)
|
||||
):
|
||||
self.unsafe_constraints.add(req)
|
||||
results.remove(req)
|
||||
|
||||
return results
|
||||
|
||||
def _group_constraints(self, constraints):
|
||||
"""
|
||||
Groups constraints (remember, InstallRequirements!) by their key name,
|
||||
and combining their SpecifierSets into a single InstallRequirement per
|
||||
package. For example, given the following constraints:
|
||||
|
||||
Django<1.9,>=1.4.2
|
||||
django~=1.5
|
||||
Flask~=0.7
|
||||
|
||||
This will be combined into a single entry per package:
|
||||
|
||||
django~=1.5,<1.9,>=1.4.2
|
||||
flask~=0.7
|
||||
|
||||
"""
|
||||
for _, ireqs in full_groupby(constraints, key=key_from_ireq):
|
||||
yield combine_install_requirements(ireqs)
|
||||
|
||||
def _resolve_one_round(self):
|
||||
"""
|
||||
Resolves one level of the current constraints, by finding the best
|
||||
match for each package in the repository and adding all requirements
|
||||
for those best package versions. Some of these constraints may be new
|
||||
or updated.
|
||||
|
||||
Returns whether new constraints appeared in this round. If no
|
||||
constraints were added or changed, this indicates a stable
|
||||
configuration.
|
||||
"""
|
||||
# Sort this list for readability of terminal output
|
||||
constraints = sorted(self.constraints, key=key_from_ireq)
|
||||
|
||||
log.debug("Current constraints:")
|
||||
for constraint in constraints:
|
||||
log.debug(" {}".format(constraint))
|
||||
|
||||
log.debug("")
|
||||
log.debug("Finding the best candidates:")
|
||||
best_matches = {self.get_best_match(ireq) for ireq in constraints}
|
||||
|
||||
# Find the new set of secondary dependencies
|
||||
log.debug("")
|
||||
log.debug("Finding secondary dependencies:")
|
||||
|
||||
their_constraints = []
|
||||
for best_match in best_matches:
|
||||
their_constraints.extend(self._iter_dependencies(best_match))
|
||||
# Grouping constraints to make clean diff between rounds
|
||||
theirs = set(self._group_constraints(sorted(their_constraints, key=str)))
|
||||
|
||||
# NOTE: We need to compare RequirementSummary objects, since
|
||||
# InstallRequirement does not define equality
|
||||
diff = {RequirementSummary(t) for t in theirs} - {
|
||||
RequirementSummary(t) for t in self.their_constraints
|
||||
}
|
||||
removed = {RequirementSummary(t) for t in self.their_constraints} - {
|
||||
RequirementSummary(t) for t in theirs
|
||||
}
|
||||
|
||||
has_changed = len(diff) > 0 or len(removed) > 0
|
||||
if has_changed:
|
||||
log.debug("")
|
||||
log.debug("New dependencies found in this round:")
|
||||
for new_dependency in sorted(diff, key=key_from_ireq):
|
||||
log.debug(" adding {}".format(new_dependency))
|
||||
log.debug("Removed dependencies in this round:")
|
||||
for removed_dependency in sorted(removed, key=key_from_ireq):
|
||||
log.debug(" removing {}".format(removed_dependency))
|
||||
|
||||
# Store the last round's results in the their_constraints
|
||||
self.their_constraints = theirs
|
||||
return has_changed, best_matches
|
||||
|
||||
    def get_best_match(self, ireq):
        """
        Returns a (pinned or editable) InstallRequirement, indicating the best
        match to use for the given InstallRequirement (in the form of an
        InstallRequirement).

        Example:
            Given the constraint Flask>=0.10, may return Flask==0.10.1 at
            a certain moment in time.

        Pinned requirements will always return themselves, i.e.

            Flask==0.10.1 => Flask==0.10.1

        """
        if ireq.editable or is_url_requirement(ireq):
            # NOTE: it's much quicker to immediately return instead of
            # hitting the index server
            best_match = ireq
        elif is_pinned_requirement(ireq):
            # NOTE: it's much quicker to immediately return instead of
            # hitting the index server
            best_match = ireq
        else:
            best_match = self.repository.find_best_match(
                ireq, prereleases=self.prereleases
            )

        # Format the best match
        log.debug(
            "  found candidate {} (constraint was {})".format(
                format_requirement(best_match), format_specifier(ireq)
            )
        )
        best_match.comes_from = ireq.comes_from
        if hasattr(ireq, "_source_ireqs"):
            best_match._source_ireqs = ireq._source_ireqs
        return best_match

    def _iter_dependencies(self, ireq):
        """
        Given a pinned, URL, or editable InstallRequirement, collect all of
        its secondary dependencies, either by looking them up in a local
        cache, or by reaching out to the repository.

        Editable requirements will never be looked up, as they may have
        changed at any time.
        """
        # Pip does not resolve dependencies of constraints. We skip handling
        # constraints here as well to prevent the cache from being polluted.
        # Constraints that are later determined to be dependencies will be
        # marked as non-constraints in later rounds by
        # `combine_install_requirements`, and will be properly resolved.
        # See https://github.com/pypa/pip/
        # blob/6896dfcd831330c13e076a74624d95fa55ff53f4/src/pip/_internal/
        # legacy_resolve.py#L325
        if ireq.constraint:
            return

        if ireq.editable or (is_url_requirement(ireq) and not ireq.link.is_wheel):
            for dependency in self.repository.get_dependencies(ireq):
                yield dependency
            return

        # fix our malformed extras
        if ireq.extras:
            if getattr(ireq, "extra", None):
                if ireq.extras:
                    ireq.extras.extend(ireq.extra)
                else:
                    ireq.extras = ireq.extra

        elif not is_pinned_requirement(ireq):
            raise TypeError(
                "Expected pinned or editable requirement, got {}".format(ireq)
            )

        # Now, either get the dependencies from the dependency cache (for
        # speed), or reach out to the external repository to
        # download and inspect the package version and get dependencies
        # from there
        if ireq not in self.dependency_cache:
            log.debug(
                "  {} not in cache, need to check index".format(
                    format_requirement(ireq)
                ),
                fg="yellow",
            )
            dependencies = self.repository.get_dependencies(ireq)
            self.dependency_cache[ireq] = sorted(
                set(format_requirement(dep) for dep in dependencies)
            )

        # Example: ['Werkzeug>=0.9', 'Jinja2>=2.4']
        dependency_strings = self.dependency_cache[ireq]
        log.debug(
            "  {:25} requires {}".format(
                format_requirement(ireq),
                ", ".join(sorted(dependency_strings, key=lambda s: s.lower())) or "-",
            )
        )
        for dependency_string in dependency_strings:
            yield install_req_from_line(
                dependency_string, constraint=ireq.constraint, comes_from=ireq
            )

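The cache-or-fetch step above is plain memoization: look the pin up in the dependency cache, fall back to the repository, and store the sorted, deduplicated result. A stripped-down sketch with a dict standing in for DependencyCache and a callable standing in for the repository (both hypothetical):

    def cached_dependencies(pin, cache, fetch):
        # cache: dict mapping a pinned requirement string to its dependency strings
        # fetch: callable that asks the index for the pin's dependencies
        if pin not in cache:
            cache[pin] = sorted(set(fetch(pin)))
        return cache[pin]

    cache = {}
    fetch = lambda pin: ["Werkzeug>=0.9", "Jinja2>=2.4"]  # canned response for the sketch
    assert cached_dependencies("Flask==0.10.1", cache, fetch) == ["Jinja2>=2.4", "Werkzeug>=0.9"]
    assert "Flask==0.10.1" in cache  # a second lookup now skips the index
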
    def reverse_dependencies(self, ireqs):
        is_non_wheel_url = lambda r: is_url_requirement(r) and not r.link.is_wheel
        non_editable = [
            ireq for ireq in ireqs if not (ireq.editable or is_non_wheel_url(ireq))
        ]
        return self.dependency_cache.reverse_dependencies(non_editable)
433
Lib/site-packages/pipenv/patched/piptools/scripts/compile.py
Normal file
@@ -0,0 +1,433 @@
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals

import os
import shlex
import sys
import tempfile

from click.utils import safecall

from .. import click
from .._compat import InstallCommand
from .._compat import install_req_from_line
from .._compat import parse_requirements
from ..cache import DependencyCache
from ..exceptions import PipToolsError
from ..locations import CACHE_DIR
from ..logging import log
from ..repositories import LocalRequirementsRepository, PyPIRepository
from ..resolver import Resolver
from ..utils import UNSAFE_PACKAGES, dedup, is_pinned_requirement, key_from_ireq
from ..writer import OutputWriter

DEFAULT_REQUIREMENTS_FILE = "requirements.in"
DEFAULT_REQUIREMENTS_OUTPUT_FILE = "requirements.txt"

# Get the default values of pip's options (including options from pipenv.patched.notpip.conf).
install_command = InstallCommand()
pip_defaults = install_command.parser.get_default_values()


@click.command()
@click.version_option()
@click.pass_context
@click.option("-v", "--verbose", count=True, help="Show more output")
@click.option("-q", "--quiet", count=True, help="Give less output")
@click.option(
    "-n",
    "--dry-run",
    is_flag=True,
    help="Only show what would happen, don't change anything",
)
@click.option(
    "-p",
    "--pre",
    is_flag=True,
    default=None,
    help="Allow resolving to prereleases (default is not)",
)
@click.option(
    "-r",
    "--rebuild",
    is_flag=True,
    help="Clear any caches upfront, rebuild from scratch",
)
@click.option(
    "-f",
    "--find-links",
    multiple=True,
    help="Look for archives in this directory or on this HTML page",
    envvar="PIP_FIND_LINKS",
)
@click.option(
    "-i",
    "--index-url",
    help="Change index URL (defaults to {})".format(pip_defaults.index_url),
    envvar="PIP_INDEX_URL",
)
@click.option(
    "--extra-index-url",
    multiple=True,
    help="Add additional index URL to search",
    envvar="PIP_EXTRA_INDEX_URL",
)
@click.option("--cert", help="Path to alternate CA bundle.")
@click.option(
    "--client-cert",
    help="Path to SSL client certificate, a single file containing "
    "the private key and the certificate in PEM format.",
)
@click.option(
    "--trusted-host",
    multiple=True,
    envvar="PIP_TRUSTED_HOST",
    help="Mark this host as trusted, even though it does not have "
    "valid or any HTTPS.",
)
@click.option(
    "--header/--no-header",
    is_flag=True,
    default=True,
    help="Add header to generated file",
)
@click.option(
    "--index/--no-index",
    is_flag=True,
    default=True,
    help="Add index URL to generated file",
)
@click.option(
    "--emit-trusted-host/--no-emit-trusted-host",
    is_flag=True,
    default=True,
    help="Add trusted host option to generated file",
)
@click.option(
    "--annotate/--no-annotate",
    is_flag=True,
    default=True,
    help="Annotate results, indicating where dependencies come from",
)
@click.option(
    "-U",
    "--upgrade",
    is_flag=True,
    default=False,
    help="Try to upgrade all dependencies to their latest versions",
)
@click.option(
    "-P",
    "--upgrade-package",
    "upgrade_packages",
    nargs=1,
    multiple=True,
    help="Specify particular packages to upgrade.",
)
@click.option(
    "-o",
    "--output-file",
    nargs=1,
    default=None,
    type=click.File("w+b", atomic=True, lazy=True),
    help=(
        "Output file name. Required if more than one input file is given. "
        "Will be derived from input file otherwise."
    ),
)
@click.option(
    "--allow-unsafe",
    is_flag=True,
    default=False,
    help="Pin packages considered unsafe: {}".format(
        ", ".join(sorted(UNSAFE_PACKAGES))
    ),
)
@click.option(
    "--generate-hashes",
    is_flag=True,
    default=False,
    help="Generate pip 8 style hashes in the resulting requirements file.",
)
@click.option(
    "--max-rounds",
    default=10,
    help="Maximum number of rounds before resolving the requirements aborts.",
)
@click.argument("src_files", nargs=-1, type=click.Path(exists=True, allow_dash=True))
@click.option(
    "--build-isolation/--no-build-isolation",
    is_flag=True,
    default=True,
    help="Enable isolation when building a modern source distribution. "
    "Build dependencies specified by PEP 518 must be already installed "
    "if build isolation is disabled.",
)
@click.option(
    "--emit-find-links/--no-emit-find-links",
    is_flag=True,
    default=True,
    help="Add the find-links option to generated file",
)
@click.option(
    "--cache-dir",
    help="Store the cache data in DIRECTORY.",
    default=CACHE_DIR,
    envvar="PIP_TOOLS_CACHE_DIR",
    show_default=True,
    show_envvar=True,
    type=click.Path(file_okay=False, writable=True),
)
@click.option("--pip-args", help="Arguments to pass directly to the pip command.")
def cli(
    ctx,
    verbose,
    quiet,
    dry_run,
    pre,
    rebuild,
    find_links,
    index_url,
    extra_index_url,
    cert,
    client_cert,
    trusted_host,
    header,
    index,
    emit_trusted_host,
    annotate,
    upgrade,
    upgrade_packages,
    output_file,
    allow_unsafe,
    generate_hashes,
    src_files,
    max_rounds,
    build_isolation,
    emit_find_links,
    cache_dir,
    pip_args,
):
    """Compiles requirements.txt from requirements.in specs."""
    log.verbosity = verbose - quiet

    if len(src_files) == 0:
        if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
            src_files = (DEFAULT_REQUIREMENTS_FILE,)
        elif os.path.exists("setup.py"):
            src_files = ("setup.py",)
        else:
            raise click.BadParameter(
                (
                    "If you do not specify an input file, "
                    "the default is {} or setup.py"
                ).format(DEFAULT_REQUIREMENTS_FILE)
            )

    if not output_file:
        # An output file must be provided for stdin
        if src_files == ("-",):
            raise click.BadParameter("--output-file is required if input is from stdin")
        # Use the default requirements output file if setup.py is the source file
        elif src_files == ("setup.py",):
            file_name = DEFAULT_REQUIREMENTS_OUTPUT_FILE
        # An output file must be provided if there are multiple source files
        elif len(src_files) > 1:
            raise click.BadParameter(
                "--output-file is required if two or more input files are given."
            )
        # Otherwise derive the output file from the source file
        else:
            base_name = src_files[0].rsplit(".", 1)[0]
            file_name = base_name + ".txt"

        output_file = click.open_file(file_name, "w+b", atomic=True, lazy=True)

        # Close the file at the end of the context execution
        ctx.call_on_close(safecall(output_file.close_intelligently))

    ###
    # Setup
    ###

    right_args = shlex.split(pip_args or "")
    pip_args = []
    if find_links:
        for link in find_links:
            pip_args.extend(["-f", link])
    if index_url:
        pip_args.extend(["-i", index_url])
    if extra_index_url:
        for extra_index in extra_index_url:
            pip_args.extend(["--extra-index-url", extra_index])
    if cert:
        pip_args.extend(["--cert", cert])
    if client_cert:
        pip_args.extend(["--client-cert", client_cert])
    if pre:
        pip_args.extend(["--pre"])
    if trusted_host:
        for host in trusted_host:
            pip_args.extend(["--trusted-host", host])

    if not build_isolation:
        pip_args.append("--no-build-isolation")
    pip_args.extend(right_args)

    repository = PyPIRepository(pip_args, cache_dir=cache_dir)

    # Parse all constraints coming from --upgrade-package/-P
    upgrade_reqs_gen = (install_req_from_line(pkg) for pkg in upgrade_packages)
    upgrade_install_reqs = {
        key_from_ireq(install_req): install_req for install_req in upgrade_reqs_gen
    }

    existing_pins_to_upgrade = set()

    # Proxy with a LocalRequirementsRepository if --upgrade is not specified
    # (= default invocation)
    if not upgrade and os.path.exists(output_file.name):
        # Use a temporary repository to ensure outdated (removed) options from
        # the existing requirements.txt wouldn't get into the current repository.
        tmp_repository = PyPIRepository(pip_args, cache_dir=cache_dir)
        ireqs = parse_requirements(
            output_file.name,
            finder=tmp_repository.finder,
            session=tmp_repository.session,
            options=tmp_repository.options,
        )

        # Exclude packages from --upgrade-package/-P from the existing
        # constraints, and separately gather pins to be upgraded
        existing_pins = {}
        for ireq in filter(is_pinned_requirement, ireqs):
            key = key_from_ireq(ireq)
            if key in upgrade_install_reqs:
                existing_pins_to_upgrade.add(key)
            else:
                existing_pins[key] = ireq
        repository = LocalRequirementsRepository(existing_pins, repository)

    ###
    # Parsing/collecting initial requirements
    ###

    constraints = []
    for src_file in src_files:
        is_setup_file = os.path.basename(src_file) == "setup.py"
        if is_setup_file or src_file == "-":
            # pip requires filenames and not files. Since we want to support
            # piping from stdin, we need to briefly save the input from stdin
            # to a temporary file and have pip read that. Also used for
            # reading requirements from install_requires in setup.py.
            tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
            if is_setup_file:
                from distutils.core import run_setup

                dist = run_setup(src_file)
                tmpfile.write("\n".join(dist.install_requires))
                comes_from = "{name} ({filename})".format(
                    name=dist.get_name(), filename=src_file
                )
            else:
                tmpfile.write(sys.stdin.read())
                comes_from = "-r -"
            tmpfile.flush()
            reqs = list(
                parse_requirements(
                    tmpfile.name,
                    finder=repository.finder,
                    session=repository.session,
                    options=repository.options,
                )
            )
            for req in reqs:
                req.comes_from = comes_from
            constraints.extend(reqs)
        else:
            constraints.extend(
                parse_requirements(
                    src_file,
                    finder=repository.finder,
                    session=repository.session,
                    options=repository.options,
                )
            )

    primary_packages = {
        key_from_ireq(ireq) for ireq in constraints if not ireq.constraint
    }

    allowed_upgrades = primary_packages | existing_pins_to_upgrade
    constraints.extend(
        ireq for key, ireq in upgrade_install_reqs.items() if key in allowed_upgrades
    )

    # Filter out pip environment markers which do not match (PEP 496)
    constraints = [
        req for req in constraints if req.markers is None or req.markers.evaluate()
    ]

    log.debug("Using indexes:")
    for index_url in dedup(repository.finder.index_urls):
        log.debug("  {}".format(index_url))

    if repository.finder.find_links:
        log.debug("")
        log.debug("Configuration:")
        for find_link in dedup(repository.finder.find_links):
            log.debug("  -f {}".format(find_link))

    try:
        resolver = Resolver(
            constraints,
            repository,
            prereleases=repository.finder.allow_all_prereleases or pre,
            cache=DependencyCache(cache_dir),
            clear_caches=rebuild,
            allow_unsafe=allow_unsafe,
        )
        results = resolver.resolve(max_rounds=max_rounds)
        if generate_hashes:
            hashes = resolver.resolve_hashes(results)
        else:
            hashes = None
    except PipToolsError as e:
        log.error(str(e))
        sys.exit(2)

    log.debug("")

    ##
    # Output
    ##

    writer = OutputWriter(
        src_files,
        output_file,
        click_ctx=ctx,
        dry_run=dry_run,
        emit_header=header,
        emit_index=index,
        emit_trusted_host=emit_trusted_host,
        annotate=annotate,
        generate_hashes=generate_hashes,
        default_index_url=repository.DEFAULT_INDEX_URL,
        index_urls=repository.finder.index_urls,
        trusted_hosts=repository.finder.trusted_hosts,
        format_control=repository.finder.format_control,
        allow_unsafe=allow_unsafe,
        find_links=repository.finder.find_links,
        emit_find_links=emit_find_links,
    )
    writer.write(
        results=results,
        unsafe_requirements=resolver.unsafe_constraints,
        markers={
            key_from_ireq(ireq): ireq.markers for ireq in constraints if ireq.markers
        },
        hashes=hashes,
    )

    if dry_run:
        log.info("Dry-run, so nothing updated.")
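Because cli is a regular click command, it can be exercised without a shell: click's CliRunner (a real click API) drives it in-process. A sketch, assuming a requirements.in exists in the working directory and that the module is importable as piptools.scripts.compile:

    from click.testing import CliRunner

    from piptools.scripts.compile import cli

    runner = CliRunner()
    result = runner.invoke(cli, ["--dry-run", "--no-header", "requirements.in"])
    print(result.exit_code)   # 0 on a successful resolve
    print(result.output)      # the compiled pins; --dry-run writes nothing to disk
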
216
Lib/site-packages/pipenv/patched/piptools/scripts/sync.py
Normal file
@@ -0,0 +1,216 @@
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals

import itertools
import os
import shlex
import sys

from .._compat import get_installed_distributions, InstallCommand

from .. import click, sync
from .._compat import parse_requirements
from ..exceptions import PipToolsError
from ..logging import log
from ..repositories import PyPIRepository
from ..utils import flat_map

DEFAULT_REQUIREMENTS_FILE = "requirements.txt"


@click.command()
@click.version_option()
@click.option(
    "-a",
    "--ask",
    is_flag=True,
    help="Show what would happen, then ask whether to continue",
)
@click.option(
    "-n",
    "--dry-run",
    is_flag=True,
    help="Only show what would happen, don't change anything",
)
@click.option("--force", is_flag=True, help="Proceed even if conflicts are found")
@click.option(
    "-f",
    "--find-links",
    multiple=True,
    help="Look for archives in this directory or on this HTML page",
    envvar="PIP_FIND_LINKS",
)
@click.option(
    "-i",
    "--index-url",
    help="Change index URL (defaults to PyPI)",
    envvar="PIP_INDEX_URL",
)
@click.option(
    "--extra-index-url",
    multiple=True,
    help="Add additional index URL to search",
    envvar="PIP_EXTRA_INDEX_URL",
)
@click.option(
    "--trusted-host",
    multiple=True,
    help="Mark this host as trusted, even though it does not have valid or any HTTPS.",
)
@click.option(
    "--no-index",
    is_flag=True,
    help="Ignore package index (only looking at --find-links URLs instead)",
)
@click.option("-q", "--quiet", default=False, is_flag=True, help="Give less output")
@click.option(
    "--user", "user_only", is_flag=True, help="Restrict attention to user directory"
)
@click.option("--cert", help="Path to alternate CA bundle.")
@click.option(
    "--client-cert",
    help="Path to SSL client certificate, a single file containing "
    "the private key and the certificate in PEM format.",
)
@click.argument("src_files", required=False, type=click.Path(exists=True), nargs=-1)
@click.option("--pip-args", help="Arguments to pass directly to pip install.")
def cli(
    ask,
    dry_run,
    force,
    find_links,
    index_url,
    extra_index_url,
    trusted_host,
    no_index,
    quiet,
    user_only,
    cert,
    client_cert,
    src_files,
    pip_args,
):
    """Synchronize virtual environment with requirements.txt."""
    if not src_files:
        if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
            src_files = (DEFAULT_REQUIREMENTS_FILE,)
        else:
            msg = "No requirement files given and no {} found in the current directory"
            log.error(msg.format(DEFAULT_REQUIREMENTS_FILE))
            sys.exit(2)

    if any(src_file.endswith(".in") for src_file in src_files):
        msg = (
            "Some input files have the .in extension, which is most likely an error "
            "and can cause weird behaviour. You probably meant to use "
            "the corresponding *.txt file?"
        )
        if force:
            log.warning("WARNING: " + msg)
        else:
            log.error("ERROR: " + msg)
            sys.exit(2)

    install_command = InstallCommand()
    options, _ = install_command.parse_args([])
    session = install_command._build_session(options)
    finder = install_command._build_package_finder(options=options, session=session)

    # Parse the requirements file. Note, all options inside the requirements
    # file will be collected by the finder.
    requirements = flat_map(
        lambda src: parse_requirements(src, finder=finder, session=session), src_files
    )

    try:
        requirements = sync.merge(requirements, ignore_conflicts=force)
    except PipToolsError as e:
        log.error(str(e))
        sys.exit(2)

    installed_dists = get_installed_distributions(skip=[], user_only=user_only)
    to_install, to_uninstall = sync.diff(requirements, installed_dists)

    install_flags = _compose_install_flags(
        finder,
        no_index=no_index,
        index_url=index_url,
        extra_index_url=extra_index_url,
        trusted_host=trusted_host,
        find_links=find_links,
        user_only=user_only,
        cert=cert,
        client_cert=client_cert,
    ) + shlex.split(pip_args or "")
    sys.exit(
        sync.sync(
            to_install,
            to_uninstall,
            verbose=(not quiet),
            dry_run=dry_run,
            install_flags=install_flags,
            ask=ask,
        )
    )


def _compose_install_flags(
    finder,
    no_index=False,
    index_url=None,
    extra_index_url=None,
    trusted_host=None,
    find_links=None,
    user_only=False,
    cert=None,
    client_cert=None,
):
    """
    Compose install flags with the given finder and CLI options.
    """
    result = []

    # Build --index-url/--extra-index-url/--no-index
    if no_index:
        result.append("--no-index")
    elif index_url:
        result.extend(["--index-url", index_url])
    elif finder.index_urls:
        finder_index_url = finder.index_urls[0]
        if finder_index_url != PyPIRepository.DEFAULT_INDEX_URL:
            result.extend(["--index-url", finder_index_url])
        for extra_index in finder.index_urls[1:]:
            result.extend(["--extra-index-url", extra_index])
    else:
        result.append("--no-index")

    for extra_index in extra_index_url or []:
        result.extend(["--extra-index-url", extra_index])

    # Build --trusted-host
    for host in itertools.chain(trusted_host or [], finder.trusted_hosts):
        result.extend(["--trusted-host", host])

    # Build --find-links
    for link in itertools.chain(find_links or [], finder.find_links):
        result.extend(["--find-links", link])

    # Build format controls --no-binary/--only-binary
    for format_control in ("no_binary", "only_binary"):
        formats = getattr(finder.format_control, format_control)
        if not formats:
            continue
        result.extend(
            ["--" + format_control.replace("_", "-"), ",".join(sorted(formats))]
        )

    if user_only:
        result.append("--user")

    if cert:
        result.extend(["--cert", cert])

    if client_cert:
        result.extend(["--client-cert", client_cert])

    return result
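The index branch of _compose_install_flags is the subtle part: an explicit --no-index or --index-url wins, otherwise the finder's configured indexes are replayed, and the default PyPI URL is elided. A standalone sketch of just that branch (the PYPI constant and the SimpleNamespace finder are stand-ins for the real pip objects):

    import types

    PYPI = "https://pypi.org/simple"

    def compose_index_flags(finder, no_index=False, index_url=None):
        # Mirrors the --index-url/--no-index branch above, isolated for illustration.
        if no_index:
            return ["--no-index"]
        if index_url:
            return ["--index-url", index_url]
        if finder.index_urls:
            flags = []
            if finder.index_urls[0] != PYPI:
                flags.extend(["--index-url", finder.index_urls[0]])
            for extra in finder.index_urls[1:]:
                flags.extend(["--extra-index-url", extra])
            return flags
        return ["--no-index"]

    finder = types.SimpleNamespace(index_urls=["https://example.org/simple"])
    assert compose_index_flags(finder) == ["--index-url", "https://example.org/simple"]
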
218
Lib/site-packages/pipenv/patched/piptools/sync.py
Normal file
@@ -0,0 +1,218 @@
import collections
import os
import sys
import tempfile
from subprocess import check_call  # nosec

from ._compat import DEV_PKGS
from ._compat import stdlib_pkgs

from . import click
from .exceptions import IncompatibleRequirements
from .utils import (
    flat_map,
    format_requirement,
    get_hashes_from_ireq,
    is_url_requirement,
    key_from_ireq,
    key_from_req,
)

PACKAGES_TO_IGNORE = (
    ["-markerlib", "pip", "pip-tools", "pip-review", "pkg-resources"]
    + list(stdlib_pkgs)
    + list(DEV_PKGS)
)


def dependency_tree(installed_keys, root_key):
    """
    Calculate the dependency tree for the package `root_key` and return
    a collection of all its dependencies. Uses a breadth-first traversal
    over the installed distributions.

    `installed_keys` should be a {key: requirement} mapping, e.g.
        {'django': from_line('django==1.8')}
    `root_key` should be the key to return the dependency tree for.
    """
    dependencies = set()
    queue = collections.deque()

    if root_key in installed_keys:
        dep = installed_keys[root_key]
        queue.append(dep)

    while queue:
        v = queue.popleft()
        key = key_from_req(v)
        if key in dependencies:
            continue

        dependencies.add(key)

        for dep_specifier in v.requires():
            dep_name = key_from_req(dep_specifier)
            if dep_name in installed_keys:
                dep = installed_keys[dep_name]

                if dep_specifier.specifier.contains(dep.version):
                    queue.append(dep)

    return dependencies

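The traversal above is an ordinary graph walk with a visited set. The same idea on a plain name -> dependencies dict, free of pkg_resources objects (the graph contents are made up for illustration):

    import collections

    def tree(graph, root):
        # graph: dict mapping a package name to the names it depends on
        seen = set()
        queue = collections.deque([root])
        while queue:
            node = queue.popleft()
            if node in seen or node not in graph:
                continue
            seen.add(node)
            queue.extend(graph[node])
        return seen

    graph = {"pip-tools": ["click", "six"], "click": [], "six": []}
    assert tree(graph, "pip-tools") == {"pip-tools", "click", "six"}
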
def get_dists_to_ignore(installed):
    """
    Returns a collection of package names to ignore when performing pip-sync,
    based on the currently installed environment. For example, when pip-tools
    is installed in the local environment, it should be ignored, including all
    of its dependencies (e.g. click). When pip-tools is not installed
    locally, click should also be installed/uninstalled depending on the given
    requirements.
    """
    installed_keys = {key_from_req(r): r for r in installed}
    return list(
        flat_map(lambda req: dependency_tree(installed_keys, req), PACKAGES_TO_IGNORE)
    )


def merge(requirements, ignore_conflicts):
    by_key = {}

    for ireq in requirements:
        # Limitation: URL requirements are merged by precise string match, so
        # "file:///example.zip#egg=example", "file:///example.zip", and
        # "example==1.0" will not merge with each other
        if ireq.match_markers():
            key = key_from_ireq(ireq)

            if not ignore_conflicts:
                existing_ireq = by_key.get(key)
                if existing_ireq:
                    # NOTE: We check equality here since we can assume that the
                    # requirements are all pinned
                    if ireq.specifier != existing_ireq.specifier:
                        raise IncompatibleRequirements(ireq, existing_ireq)

            # TODO: Always pick the largest specifier in case of a conflict
            by_key[key] = ireq
    return by_key.values()


def diff_key_from_ireq(ireq):
    """
    Calculate a key for comparing a compiled requirement with installed modules.
    For URL requirements, only provide a useful key if the url includes
    #egg=name==version, which will set ireq.req.name and ireq.specifier.
    Otherwise return ireq.link so the key will not match and the package will
    reinstall. Reinstall is necessary to ensure that packages will reinstall
    if the URL is changed but the version is not.
    """
    if is_url_requirement(ireq):
        if (
            ireq.req
            and (getattr(ireq.req, "key", None) or getattr(ireq.req, "name", None))
            and ireq.specifier
        ):
            return key_from_ireq(ireq)
        return str(ireq.link)
    return key_from_ireq(ireq)


def diff(compiled_requirements, installed_dists):
    """
    Calculate which packages should be installed or uninstalled, given a set
    of compiled requirements and a list of currently installed modules.
    """
    requirements_lut = {diff_key_from_ireq(r): r for r in compiled_requirements}

    satisfied = set()  # holds keys
    to_install = set()  # holds InstallRequirement objects
    to_uninstall = set()  # holds keys

    pkgs_to_ignore = get_dists_to_ignore(installed_dists)
    for dist in installed_dists:
        key = key_from_req(dist)
        if key not in requirements_lut or not requirements_lut[key].match_markers():
            to_uninstall.add(key)
        elif requirements_lut[key].specifier.contains(dist.version):
            satisfied.add(key)

    for key, requirement in requirements_lut.items():
        if key not in satisfied and requirement.match_markers():
            to_install.add(requirement)

    # Make sure to not uninstall any packages that should be ignored
    to_uninstall -= set(pkgs_to_ignore)

    return (to_install, to_uninstall)

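diff boils down to three buckets computed with set arithmetic over requirement keys. The bookkeeping in miniature, with bare name -> version dicts playing the part of compiled requirements and installed distributions (the package names are arbitrary examples):

    required = {"django": "1.8", "flask": "0.10.1"}       # compiled pins
    installed = {"django": "1.8", "requests": "2.22.0"}   # current environment

    satisfied = {k for k, v in installed.items() if required.get(k) == v}
    to_uninstall = set(installed) - set(required)
    to_install = set(required) - satisfied

    assert to_uninstall == {"requests"} and to_install == {"flask"}
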
def sync(
    to_install,
    to_uninstall,
    verbose=False,
    dry_run=False,
    install_flags=None,
    ask=False,
):
    """
    Install and uninstall the given sets of modules.
    """
    if not to_uninstall and not to_install:
        if verbose:
            click.echo("Everything up-to-date")
        return 0

    pip_flags = []
    if not verbose:
        pip_flags += ["-q"]

    if ask:
        dry_run = True

    if dry_run:
        if to_uninstall:
            click.echo("Would uninstall:")
            for pkg in to_uninstall:
                click.echo("  {}".format(pkg))

        if to_install:
            click.echo("Would install:")
            for ireq in to_install:
                click.echo("  {}".format(format_requirement(ireq)))

        if ask and click.confirm("Would you like to proceed with these changes?"):
            dry_run = False

    if not dry_run:
        if to_uninstall:
            check_call(  # nosec
                [sys.executable, "-m", "pip", "uninstall", "-y"]
                + pip_flags
                + sorted(to_uninstall)
            )

        if to_install:
            if install_flags is None:
                install_flags = []
            # prepare requirement lines
            req_lines = []
            for ireq in sorted(to_install, key=key_from_ireq):
                ireq_hashes = get_hashes_from_ireq(ireq)
                req_lines.append(format_requirement(ireq, hashes=ireq_hashes))

            # save requirement lines to a temporary file
            tmp_req_file = tempfile.NamedTemporaryFile(mode="wt", delete=False)
            tmp_req_file.write("\n".join(req_lines))
            tmp_req_file.close()

            try:
                check_call(  # nosec
                    [sys.executable, "-m", "pip", "install", "-r", tmp_req_file.name]
                    + pip_flags
                    + install_flags
                )
            finally:
                os.unlink(tmp_req_file.name)

    return 0
495
Lib/site-packages/pipenv/patched/piptools/utils.py
Normal file
@@ -0,0 +1,495 @@
# coding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals

import os
import sys
from collections import OrderedDict
from itertools import chain, groupby

import six
from click.utils import LazyFile
from ._compat import install_req_from_line
from six.moves import shlex_quote
from pipenv.vendor.packaging.specifiers import SpecifierSet, InvalidSpecifier
from pipenv.vendor.packaging.version import Version, InvalidVersion, parse as parse_version
from pipenv.vendor.packaging.markers import Marker, Op, Value, Variable


from ._compat import PIP_VERSION
from .click import style

UNSAFE_PACKAGES = {"setuptools", "distribute", "pip"}
COMPILE_EXCLUDE_OPTIONS = {
    "--dry-run",
    "--quiet",
    "--rebuild",
    "--upgrade",
    "--upgrade-package",
    "--verbose",
    "--cache-dir",
}


def simplify_markers(ireq):
    """Clean and deduplicate the markers of the given InstallRequirement.

    :param ireq: An InstallRequirement to clean
    :type ireq: :class:`~pip._internal.req.req_install.InstallRequirement`
    :return: An InstallRequirement with cleaned Markers
    :rtype: :class:`~pip._internal.req.req_install.InstallRequirement`
    """

    if not getattr(ireq, 'markers', None):
        return ireq
    markers = ireq.markers
    marker_list = []
    if isinstance(markers, six.string_types):
        if ';' in markers:
            markers = [Marker(m_str.strip()) for m_str in markers.split(';')]
        else:
            markers = Marker(markers)
    for m in markers._markers:
        _single_marker = []
        if isinstance(m[0], six.string_types):
            continue
        if not isinstance(m[0], (list, tuple)):
            marker_list.append(''.join([_piece.serialize() for _piece in m]))
            continue
        for _marker_part in m:
            if isinstance(_marker_part, six.string_types):
                _single_marker.append(_marker_part)
                continue
            _single_marker.append(''.join([_piece.serialize() for _piece in _marker_part]))
        _single_marker = [_m.strip() for _m in _single_marker]
        marker_list.append(tuple(_single_marker,))
    marker_str = ' and '.join(list(dedup(tuple(marker_list,)))) if marker_list else ''
    new_markers = Marker(marker_str)
    ireq.markers = new_markers
    new_ireq = install_req_from_line(format_requirement(ireq))
    if ireq.constraint:
        new_ireq.constraint = ireq.constraint
    return new_ireq


def clean_requires_python(candidates):
    """Get a cleaned list of all the candidates with valid specifiers in their `requires_python` attributes."""
    all_candidates = []
    py_version = parse_version(
        os.environ.get(
            'PIPENV_REQUESTED_PYTHON_VERSION',
            '.'.join(map(str, sys.version_info[:3])),
        )
    )
    for c in candidates:
        if getattr(c, "requires_python", None):
            # Old specifications had people setting this to single digits,
            # which is effectively the same as '>=digit,<digit+1'
            if len(c.requires_python) == 1 and c.requires_python in ("2", "3"):
                c.requires_python = '>={0},<{1!s}'.format(
                    c.requires_python, int(c.requires_python) + 1
                )
            try:
                specifierset = SpecifierSet(c.requires_python)
            except InvalidSpecifier:
                continue
            else:
                if not specifierset.contains(py_version):
                    continue
        all_candidates.append(c)
    return all_candidates

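The bare-digit normalization above turns a Requires-Python of "3" into a proper range before it is handed to SpecifierSet. The same step in isolation, using the packaging library directly (the version strings are arbitrary examples):

    from packaging.specifiers import SpecifierSet
    from packaging.version import parse as parse_version

    requires_python = "3"
    if requires_python in ("2", "3"):
        requires_python = ">={0},<{1!s}".format(requires_python, int(requires_python) + 1)

    assert requires_python == ">=3,<4"
    assert SpecifierSet(requires_python).contains(parse_version("3.7.4"))
    assert not SpecifierSet(requires_python).contains(parse_version("2.7.16"))
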
def key_from_ireq(ireq):
    """Get a standardized key for an InstallRequirement."""
    if ireq.req is None and ireq.link is not None:
        return str(ireq.link)
    else:
        return key_from_req(ireq.req)


def key_from_req(req):
    """Get an all-lowercase version of the requirement's name."""
    if hasattr(req, "key"):
        # from pkg_resources, such as installed dists for pip-sync
        key = req.key
    else:
        # from packaging, such as install requirements from requirements.txt
        key = req.name

    key = key.replace("_", "-").lower()
    return key


def comment(text):
    return style(text, fg="green")


def make_install_requirement(name, version, extras, markers, constraint=False):
    # If no extras are specified, the extras string is blank
    extras_string = ""
    if extras:
        # Sort extras for stability
        extras_string = "[{}]".format(",".join(sorted(extras)))

    if not markers:
        return install_req_from_line(
            str('{}{}=={}'.format(name, extras_string, version)),
            constraint=constraint)
    else:
        return install_req_from_line(
            str('{}{}=={}; {}'.format(name, extras_string, version, str(markers))),
            constraint=constraint)


def _requirement_to_str_lowercase_name(requirement):
    """
    Formats a packaging.requirements.Requirement with a lowercase name.

    This is simply a copy of
    https://github.com/pypa/pipenv/patched/packaging/blob/pipenv/patched/16.8/packaging/requirements.py#L109-L124
    modified to lowercase the dependency name.

    Previously, we were invoking the original Requirement.__str__ method and
    lowercasing the entire result, which would lowercase the name, *and* other,
    important stuff that should not be lowercased (such as the marker). See
    this issue for more information: https://github.com/pypa/pipenv/patched/pipenv/issues/2113.
    """
    parts = [requirement.name.lower()]

    if requirement.extras:
        parts.append("[{0}]".format(",".join(sorted(requirement.extras))))

    if requirement.specifier:
        parts.append(str(requirement.specifier))

    if requirement.url:
        parts.append("@ {0}".format(requirement.url))

    if requirement.marker:
        parts.append("; {0}".format(requirement.marker))

    return "".join(parts)


def is_url_requirement(ireq):
    """
    Return True if the requirement was specified as a path or URL.
    ireq.original_link will have been set by InstallRequirement.__init__
    """
    return bool(ireq.original_link)


def format_requirement(ireq, marker=None, hashes=None):
    """
    Generic formatter for pretty printing InstallRequirements to the terminal
    in a less verbose way than using its `__str__` method.
    """
    if ireq.editable:
        line = "-e {}".format(ireq.link.url)
    elif ireq.link and ireq.link.is_vcs:
        line = str(ireq.req)
    elif is_url_requirement(ireq):
        line = ireq.link.url
    else:
        line = _requirement_to_str_lowercase_name(ireq.req)

    if marker and ';' not in line:
        line = "{}; {}".format(line, marker)

    if hashes:
        for hash_ in sorted(hashes):
            line += " \\\n    --hash={}".format(hash_)

    return line


def format_specifier(ireq):
    """
    Generic formatter for pretty printing the specifier part of
    InstallRequirements to the terminal.
    """
    # TODO: Ideally, this is carried over to the pip library itself
    specs = ireq.specifier._specs if ireq.req is not None else []
    specs = sorted(specs, key=lambda x: x._spec[1])
    return ",".join(str(s) for s in specs) or "<any>"


def is_pinned_requirement(ireq):
    """
    Returns whether an InstallRequirement is a "pinned" requirement.

    An InstallRequirement is considered pinned if:

    - It is not editable
    - It has exactly one specifier
    - That specifier is "=="
    - The version does not contain a wildcard

    Examples:
        django==1.8   # pinned
        django>1.8    # NOT pinned
        django~=1.8   # NOT pinned
        django==1.*   # NOT pinned
    """
    if ireq.editable:
        return False

    if ireq.req is None or len(ireq.specifier._specs) != 1:
        return False

    op, version = next(iter(ireq.specifier._specs))._spec
    return (op == "==" or op == "===") and not version.endswith(".*")


def as_tuple(ireq):
    """
    Pulls out the (name: str, version: str, extras: (str,)) tuple from
    the pinned InstallRequirement.
    """
    if not is_pinned_requirement(ireq):
        raise TypeError("Expected a pinned InstallRequirement, got {}".format(ireq))

    name = key_from_ireq(ireq)
    version = next(iter(ireq.specifier._specs))._spec[1]
    extras = tuple(sorted(ireq.extras))
    return name, version, extras


def full_groupby(iterable, key=None):
    """Like groupby(), but sorts the input on the group key first."""
    return groupby(sorted(iterable, key=key), key=key)


def flat_map(fn, collection):
    """Map a function over a collection and flatten the result by one level."""
    return chain.from_iterable(map(fn, collection))


def lookup_table(values, key=None, keyval=None, unique=False, use_lists=False):
    """
    Builds a dict-based lookup table (index) elegantly.

    Supports building normal and unique lookup tables. For example:

    >>> assert lookup_table(
    ...     ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0]) == {
    ...     'b': {'bar', 'baz'},
    ...     'f': {'foo'},
    ...     'q': {'quux', 'qux'}
    ... }

    For key functions that uniquely identify values, set unique=True:

    >>> assert lookup_table(
    ...     ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],
    ...     unique=True) == {
    ...     'b': 'baz',
    ...     'f': 'foo',
    ...     'q': 'quux'
    ... }

    For the values represented as lists, set use_lists=True:

    >>> assert lookup_table(
    ...     ['foo', 'bar', 'baz', 'qux', 'quux'], lambda s: s[0],
    ...     use_lists=True) == {
    ...     'b': ['bar', 'baz'],
    ...     'f': ['foo'],
    ...     'q': ['qux', 'quux']
    ... }

    The values of the resulting lookup table will be lists, not sets.

    For extra power, you can even change the values while building up the LUT.
    To do so, use the `keyval` function instead of the `key` arg:

    >>> assert lookup_table(
    ...     ['foo', 'bar', 'baz', 'qux', 'quux'],
    ...     keyval=lambda s: (s[0], s[1:])) == {
    ...     'b': {'ar', 'az'},
    ...     'f': {'oo'},
    ...     'q': {'uux', 'ux'}
    ... }

    """
    if keyval is None:
        if key is None:

            def keyval(v):
                return v

        else:

            def keyval(v):
                return (key(v), v)

    if unique:
        return dict(keyval(v) for v in values)

    lut = {}
    for value in values:
        k, v = keyval(value)
        try:
            s = lut[k]
        except KeyError:
            if use_lists:
                s = lut[k] = list()
            else:
                s = lut[k] = set()
        if use_lists:
            s.append(v)
        else:
            s.add(v)
    return dict(lut)


def dedup(iterable):
    """Deduplicate an iterable object like iter(set(iterable)) but
    order-preserved.
    """
    return iter(OrderedDict.fromkeys(iterable))

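dedup relies on OrderedDict.fromkeys preserving first-seen order, which a plain set() would destroy. For instance:

    from collections import OrderedDict

    def dedup(iterable):
        return iter(OrderedDict.fromkeys(iterable))

    assert list(dedup(["pip", "setuptools", "pip", "wheel"])) == ["pip", "setuptools", "wheel"]
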
def name_from_req(req):
    """Get the name of the requirement."""
    if hasattr(req, "project_name"):
        # from pkg_resources, such as installed dists for pip-sync
        return req.project_name
    else:
        # from packaging, such as install requirements from requirements.txt
        return req.name


def fs_str(string):
    """
    Convert the given string to a correctly encoded filesystem string.

    On Python 2, if the input string is unicode, converts it to bytes
    encoded with the filesystem encoding.

    On Python 3 returns the string as is, since Python 3 uses unicode
    paths and the input string shouldn't be bytes.

    :type string: str|unicode
    :rtype: str
    """
    if isinstance(string, str):
        return string
    if isinstance(string, bytes):
        raise AssertionError
    return string.encode(_fs_encoding)


_fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()


def get_hashes_from_ireq(ireq):
    """
    Given an InstallRequirement, return a list of string hashes in
    the format "{algorithm}:{hash}". Return an empty list if there are no hashes
    in the requirement options.
    """
    result = []
    if PIP_VERSION[:2] <= (20, 0):
        ireq_hashes = ireq.options.get("hashes", {})
    else:
        ireq_hashes = ireq.hash_options
    for algorithm, hexdigests in ireq_hashes.items():
        for hash_ in hexdigests:
            result.append("{}:{}".format(algorithm, hash_))
    return result


def force_text(s):
    """
    Return a string representing `s`.
    """
    if s is None:
        return ""
    if not isinstance(s, six.string_types):
        return six.text_type(s)
    return s


def get_compile_command(click_ctx):
    """
    Returns a normalized compile command depending on the cli context.

    The command will be normalized by:
        - expanding short options to their long form
        - removing values that are already the default
        - sorting the arguments
        - removing one-off arguments like '--upgrade'
        - removing arguments that don't change build behaviour like '--verbose'
    """
    from piptools.scripts.compile import cli

    # Map of the compile cli options (option name -> click.Option)
    compile_options = {option.name: option for option in cli.params}

    left_args = []
    right_args = []

    for option_name, value in click_ctx.params.items():
        option = compile_options[option_name]

        # Get the latest option name (usually it'll be a long name)
        option_long_name = option.opts[-1]

        # Collect variadic args separately; they will be added
        # at the end of the command later
        if option.nargs < 0:
            # These will necessarily be src_files
            # Re-add click-stripped '--' if any start with '-'
            if any(val.startswith("-") and val != "-" for val in value):
                right_args.append("--")
            right_args.extend([shlex_quote(force_text(val)) for val in value])
            continue

        # Exclude one-off options (--upgrade/--upgrade-package/--rebuild/...)
        # or options that don't change compile behaviour (--verbose/--dry-run/...)
        if option_long_name in COMPILE_EXCLUDE_OPTIONS:
            continue

        # Skip options without a value
        if option.default is None and not value:
            continue

        # Skip options with a default value
        if option.default == value:
            continue

        # Use a file name for file-like objects
        if isinstance(value, LazyFile):
            value = value.name

        # Convert the value to a list
        if not isinstance(value, (tuple, list)):
            value = [value]

        for val in value:
            # Flags have no value, so emit only the appropriate option long name
            if option.is_flag:
                # If there are false-options, choose an option name depending on the value
                if option.secondary_opts:
                    # Get the latest false-option
                    secondary_option_long_name = option.secondary_opts[-1]
                    arg = option_long_name if val else secondary_option_long_name
                # There are no false-options, use the true-option
                else:
                    arg = option_long_name
                left_args.append(shlex_quote(arg))
            # Append the option with its value to the args
            else:
                if option.name == "pip_args":
                    # shlex_quote would produce functional but noisily quoted results,
                    # e.g. --pip-args='--cache-dir='"'"'/tmp/with spaces'"'"''
                    # Instead, we try to get more legible quoting via repr:
                    left_args.append(
                        "{option}={value}".format(
                            option=option_long_name, value=repr(fs_str(force_text(val)))
                        )
                    )
                else:
                    left_args.append(
                        "{option}={value}".format(
                            option=option_long_name, value=shlex_quote(force_text(val))
                        )
                    )

    return " ".join(["pip-compile"] + sorted(left_args) + sorted(right_args))
239
Lib/site-packages/pipenv/patched/piptools/writer.py
Normal file
@@ -0,0 +1,239 @@
from __future__ import unicode_literals

import os
import re
from itertools import chain

import six

from .click import unstyle
from .logging import log
from .utils import (
    UNSAFE_PACKAGES,
    comment,
    dedup,
    format_requirement,
    get_compile_command,
    key_from_ireq,
)

MESSAGE_UNHASHED_PACKAGE = comment(
    "# WARNING: pip install will require the following package to be hashed."
    "\n# Consider using a hashable URL like "
    "https://github.com/jazzband/pip-tools/archive/SOMECOMMIT.zip"
)

MESSAGE_UNSAFE_PACKAGES_UNPINNED = comment(
    "# WARNING: The following packages were not pinned, but pip requires them to be"
    "\n# pinned when the requirements file includes hashes. "
    "Consider using the --allow-unsafe flag."
)

MESSAGE_UNSAFE_PACKAGES = comment(
    "# The following packages are considered to be unsafe in a requirements file:"
)

MESSAGE_UNINSTALLABLE = (
    "The generated requirements file may be rejected by pip install. "
    "See # WARNING lines for details."
)


strip_comes_from_line_re = re.compile(r" \(line \d+\)$")


def _comes_from_as_string(ireq):
    if isinstance(ireq.comes_from, six.string_types):
        return strip_comes_from_line_re.sub("", ireq.comes_from)
    return key_from_ireq(ireq.comes_from)

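_comes_from_as_string strips the trailing " (line N)" suffix that pip appends to a requirement's origin, so the "# via" annotations stay stable when a line moves within a source file. Concretely:

    import re

    strip_comes_from_line_re = re.compile(r" \(line \d+\)$")
    assert strip_comes_from_line_re.sub("", "requirements.in (line 3)") == "requirements.in"
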
class OutputWriter(object):
    def __init__(
        self,
        src_files,
        dst_file,
        click_ctx,
        dry_run,
        emit_header,
        emit_index,
        emit_trusted_host,
        annotate,
        generate_hashes,
        default_index_url,
        index_urls,
        trusted_hosts,
        format_control,
        allow_unsafe,
        find_links,
        emit_find_links,
    ):
        self.src_files = src_files
        self.dst_file = dst_file
        self.click_ctx = click_ctx
        self.dry_run = dry_run
        self.emit_header = emit_header
        self.emit_index = emit_index
        self.emit_trusted_host = emit_trusted_host
        self.annotate = annotate
        self.generate_hashes = generate_hashes
        self.default_index_url = default_index_url
        self.index_urls = index_urls
        self.trusted_hosts = trusted_hosts
        self.format_control = format_control
        self.allow_unsafe = allow_unsafe
        self.find_links = find_links
        self.emit_find_links = emit_find_links

    def _sort_key(self, ireq):
        return (not ireq.editable, str(ireq.req).lower())

    def write_header(self):
        if self.emit_header:
            yield comment("#")
            yield comment("# This file is autogenerated by pip-compile")
            yield comment("# To update, run:")
            yield comment("#")
            compile_command = os.environ.get(
                "CUSTOM_COMPILE_COMMAND"
            ) or get_compile_command(self.click_ctx)
            yield comment("# {}".format(compile_command))
            yield comment("#")

    def write_index_options(self):
        if self.emit_index:
            for index, index_url in enumerate(dedup(self.index_urls)):
                if index_url.rstrip("/") == self.default_index_url:
                    continue
                flag = "--index-url" if index == 0 else "--extra-index-url"
                yield "{} {}".format(flag, index_url)

    def write_trusted_hosts(self):
        if self.emit_trusted_host:
            for trusted_host in dedup(self.trusted_hosts):
                yield "--trusted-host {}".format(trusted_host)

    def write_format_controls(self):
        for nb in dedup(sorted(self.format_control.no_binary)):
            yield "--no-binary {}".format(nb)
        for ob in dedup(sorted(self.format_control.only_binary)):
            yield "--only-binary {}".format(ob)

    def write_find_links(self):
        if self.emit_find_links:
            for find_link in dedup(self.find_links):
                yield "--find-links {}".format(find_link)

    def write_flags(self):
        emitted = False
        for line in chain(
            self.write_index_options(),
            self.write_find_links(),
            self.write_trusted_hosts(),
            self.write_format_controls(),
        ):
            emitted = True
            yield line
        if emitted:
            yield ""

    def _iter_lines(self, results, unsafe_requirements=None, markers=None, hashes=None):
        # default values
        unsafe_requirements = unsafe_requirements or []
        markers = markers or {}
        hashes = hashes or {}

        # Check for unhashed or unpinned packages if at least one package does have
        # hashes, which will trigger pip install's --require-hashes mode.
        warn_uninstallable = False
        has_hashes = hashes and any(h for h in hashes.values())

        yielded = False

        for line in self.write_header():
            yield line
            yielded = True
        for line in self.write_flags():
            yield line
            yielded = True

        unsafe_requirements = (
            {r for r in results if r.name in UNSAFE_PACKAGES}
            if not unsafe_requirements
            else unsafe_requirements
        )
        packages = {r for r in results if r.name not in UNSAFE_PACKAGES}

        if packages:
            packages = sorted(packages, key=self._sort_key)
            for ireq in packages:
                if has_hashes and not hashes.get(ireq):
                    yield MESSAGE_UNHASHED_PACKAGE
                    warn_uninstallable = True
                line = self._format_requirement(
                    ireq, markers.get(key_from_ireq(ireq)), hashes=hashes
                )
                yield line
                yielded = True

        if unsafe_requirements:
            unsafe_requirements = sorted(unsafe_requirements, key=self._sort_key)
            yield ""
            yielded = True
            if has_hashes and not self.allow_unsafe:
                yield MESSAGE_UNSAFE_PACKAGES_UNPINNED
                warn_uninstallable = True
            else:
                yield MESSAGE_UNSAFE_PACKAGES

            for ireq in unsafe_requirements:
                ireq_key = key_from_ireq(ireq)
                if not self.allow_unsafe:
                    yield comment("# {}".format(ireq_key))
                else:
                    line = self._format_requirement(
                        ireq, marker=markers.get(ireq_key), hashes=hashes
                    )
                    yield line

        # Yield even when there's no real content, so that blank files are written
        if not yielded:
            yield ""

        if warn_uninstallable:
            log.warning(MESSAGE_UNINSTALLABLE)

    def write(self, results, unsafe_requirements, markers, hashes):
        for line in self._iter_lines(results, unsafe_requirements, markers, hashes):
            log.info(line)
            if not self.dry_run:
                self.dst_file.write(unstyle(line).encode("utf-8"))
                self.dst_file.write(os.linesep.encode("utf-8"))

    def _format_requirement(self, ireq, marker=None, hashes=None):
        ireq_hashes = (hashes if hashes is not None else {}).get(ireq)

        line = format_requirement(ireq, marker=marker, hashes=ireq_hashes)

        if not self.annotate:
            return line

        # Annotate what packages or reqs-ins this package is required by
        required_by = set()
        if hasattr(ireq, "_source_ireqs"):
            required_by |= {
                _comes_from_as_string(src_ireq)
                for src_ireq in ireq._source_ireqs
                if src_ireq.comes_from
            }
        elif ireq.comes_from:
            required_by.add(_comes_from_as_string(ireq))
        if required_by:
            annotation = ", ".join(sorted(required_by))
            line = "{:24}{}{}".format(
                line,
                " \\\n    " if ireq_hashes else "  ",
                comment("# via " + annotation),
            )
        return line