Commit: login page
Lib/site-packages/pipenv/vendor/Makefile (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
# Taken from pip: https://github.com/pypa/pip/blob/95bcf8c5f6394298035a7332c441868f3b0169f4/src/pip/_vendor/Makefile
all: clean vendor

clean:
	@# Delete vendored items
	find . -maxdepth 1 -mindepth 1 -type d -exec rm -rf {} \;

vendor:
	@# Install vendored libraries
	pip install -t . -r vendor.txt

	@# Cleanup .egg-info directories
	rm -rf *.egg-info
	rm -rf *.dist-info
Lib/site-packages/pipenv/vendor/README.md (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
# Vendored packages

These packages are copied as-is from upstream to reduce Pipenv dependencies.
They should always be kept synced with upstream. DO NOT MODIFY DIRECTLY! If
you need to patch anything, move the package to `patched` and generate a
patch for it using `git diff -p <dependency_root_dir>`. This patch belongs
in `./pipenv/tasks/vendoring/patches/patched/<packagename.patchdesc>.patch`.

To add a vendored dependency or to update a single dependency, use the
vendoring scripts:

```
pipenv run inv vendoring.update --package="pkgname==versionnum"
```

This will automatically pin the package in `./pipenv/vendor/vendor.txt`
or it will update the pin if the package is already present, and it will
then update the package and download any necessary licenses (if available).
Note that this will not download any dependencies; you must add each of
those individually.

When updating, ensure that the corresponding LICENSE files are still
up-to-date.
Lib/site-packages/pipenv/vendor/__init__.py (vendored, new file, empty)
Lib/site-packages/pipenv/vendor/__pycache__/__init__.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/__pycache__/appdirs.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/__pycache__/cached_property.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/__pycache__/contextlib2.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/__pycache__/delegator.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/__pycache__/docopt.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/__pycache__/first.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/__pycache__/parse.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/__pycache__/pipdeptree.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/__pycache__/pyparsing.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/__pycache__/scandir.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/__pycache__/semver.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/__pycache__/six.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/__pycache__/zipp.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/appdirs.LICENSE.txt (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
# This is the MIT license

Copyright (c) 2010 ActiveState Software Inc.

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Lib/site-packages/pipenv/vendor/appdirs.py (vendored, new file, 608 lines)
@@ -0,0 +1,608 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor

"""Utilities for determining application-specific dirs.

See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html

__version__ = "1.4.4"
__version_info__ = tuple(int(segment) for segment in __version__.split("."))


import sys
import os

PY3 = sys.version_info[0] == 3

if PY3:
    unicode = str

if sys.platform.startswith('java'):
    import platform
    os_name = platform.java_ver()[3][0]
    if os_name.startswith('Windows'):  # "Windows XP", "Windows 7", etc.
        system = 'win32'
    elif os_name.startswith('Mac'):  # "Mac OS X", etc.
        system = 'darwin'
    else:  # "Linux", "SunOS", "FreeBSD", etc.
        # Setting this to "linux2" is not ideal, but only Windows or Mac
        # are actually checked for and the rest of the module expects
        # *sys.platform* style strings.
        system = 'linux2'
else:
    system = sys.platform


def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        Mac OS X:               ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(const))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of data dirs should be
            returned. By default, the first item from XDG_DATA_DIRS is
            returned, or '/usr/local/share/<AppName>',
            if XDG_DATA_DIRS is not set

    Typical site data directories are:
        Mac OS X:   /Library/Application Support/<AppName>
        Unix:       /usr/local/share/<AppName> or /usr/share/<AppName>
        Win XP:     C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
        Win 7:      C:\ProgramData\<AppAuthor>\<AppName>   # Hidden, but writeable on Win 7.

    For Unix, this is using the $XDG_DATA_DIRS[0] default.

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('/Library/Application Support')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG default for $XDG_DATA_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_DATA_DIRS',
                         os.pathsep.join(['/usr/local/share', '/usr/share']))
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
        return path

    if appname and version:
        path = os.path.join(path, version)
    return path


def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user config directories are:
        Mac OS X:   same as user_data_dir
        Unix:       ~/.config/<AppName>     # or in $XDG_CONFIG_HOME, if defined
        Win *:      same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if system in ["win32", "darwin"]:
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of config dirs should be
            returned. By default, the first item from XDG_CONFIG_DIRS is
            returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set

    Typical site config directories are:
        Mac OS X:   same as site_data_dir
        Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
                    $XDG_CONFIG_DIRS
        Win *:      same as site_data_dir
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)

    For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system in ["win32", "darwin"]:
        path = site_data_dir(appname, appauthor)
        if appname and version:
            path = os.path.join(path, version)
    else:
        # XDG default for $XDG_CONFIG_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
    return path


def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific cache dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Cache" to the base app data dir for Windows. See
            discussion below.

    Typical user cache directories are:
        Mac OS X:   ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go in
    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
    app data dir (the default returned by `user_data_dir` above). Apps typically
    put cache data somewhere *under* the given dir here. Some examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0
    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    This can be disabled with the `opinion=False` option.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
            if opinion:
                path = os.path.join(path, "Cache")
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific state dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user state directories are:
        Mac OS X:   same as user_data_dir
        Unix:       ~/.local/state/<AppName>   # or in $XDG_STATE_HOME, if defined
        Win *:      same as user_data_dir

    For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
    to extend the XDG spec and support $XDG_STATE_HOME.

    That means, by default "~/.local/state/<AppName>".
    """
    if system in ["win32", "darwin"]:
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Logs" to the base app data dir for Windows, and "log" to the
            base cache dir for Unix. See discussion below.

    Typical user log directories are:
        Mac OS X:   ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some windows apps use for a logs dir.)

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    This can be disabled with the `opinion=False` option.
    """
    if system == "darwin":
        path = os.path.join(
            os.path.expanduser('~/Library/Logs'),
            appname)
    elif system == "win32":
        path = user_data_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "Logs")
    else:
        path = user_cache_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "log")
    if appname and version:
        path = os.path.join(path, version)
    return path


class AppDirs(object):
    """Convenience wrapper for getting application dirs."""
    def __init__(self, appname=None, appauthor=None, version=None,
                 roaming=False, multipath=False):
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        self.roaming = roaming
        self.multipath = multipath

    @property
    def user_data_dir(self):
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        return site_config_dir(self.appname, self.appauthor,
                               version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_state_dir(self):
        return user_state_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)


#---- internal support stuff

def _get_win_folder_from_registry(csidl_name):
    """This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    if PY3:
        import winreg as _winreg
    else:
        import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
    return dir


def _get_win_folder_with_pywin32(csidl_name):
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        dir = unicode(dir)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        has_high_char = False
        for c in dir:
            if ord(c) > 255:
                has_high_char = True
                break
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except ImportError:
                pass
    except UnicodeError:
        pass
    return dir


def _get_win_folder_with_ctypes(csidl_name):
    import ctypes

    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value

def _get_win_folder_with_jna(csidl_name):
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in dir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros('c', buf_size)
        kernel = win32.Kernel32.INSTANCE
        if kernel.GetShortPathName(dir, buf, buf_size):
            dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    return dir

if system == "win32":
    try:
        import win32com.shell
        _get_win_folder = _get_win_folder_with_pywin32
    except ImportError:
        try:
            from ctypes import windll
            _get_win_folder = _get_win_folder_with_ctypes
        except ImportError:
            try:
                import com.sun.jna
                _get_win_folder = _get_win_folder_with_jna
            except ImportError:
                _get_win_folder = _get_win_folder_from_registry


#---- self test code

if __name__ == "__main__":
    appname = "MyApp"
    appauthor = "MyCompany"

    props = ("user_data_dir",
             "user_config_dir",
             "user_cache_dir",
             "user_state_dir",
             "user_log_dir",
             "site_data_dir",
             "site_config_dir")

    print("-- app dirs %s --" % __version__)

    print("-- app dirs (with optional 'version')")
    dirs = AppDirs(appname, appauthor, version="1.0")
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'version')")
    dirs = AppDirs(appname, appauthor)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'appauthor')")
    dirs = AppDirs(appname)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (with disabled 'appauthor')")
    dirs = AppDirs(appname, appauthor=False)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))
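For orientation, here is a small usage sketch of the `appdirs` module added above. It is illustrative only: the app and author names are made up, and the import path assumes Pipenv's vendor tree is importable as `pipenv.vendor`.

```
from pipenv.vendor import appdirs

# Free-function form: a per-user, platform-appropriate data directory,
# e.g. ~/.local/share/MyApp on Linux, ~/Library/Application Support/MyApp on macOS.
print(appdirs.user_data_dir("MyApp", "MyCompany"))

# Wrapper form: bundle appname/appauthor/version once and read dirs as
# properties, mirroring the self-test at the bottom of the file.
dirs = appdirs.AppDirs("MyApp", "MyCompany", version="1.0")
print(dirs.user_cache_dir)   # per-user cache dir, with "1.0" appended
print(dirs.site_config_dir)  # shared config dir (first $XDG_CONFIG_DIRS entry on Unix)
```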
Lib/site-packages/pipenv/vendor/attr/LICENSE (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2015 Hynek Schlawack

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Lib/site-packages/pipenv/vendor/attr/__init__.py (vendored, new file, 68 lines)
@@ -0,0 +1,68 @@
from __future__ import absolute_import, division, print_function

from functools import partial

from . import converters, exceptions, filters, validators
from ._config import get_run_validators, set_run_validators
from ._funcs import asdict, assoc, astuple, evolve, has
from ._make import (
    NOTHING,
    Attribute,
    Factory,
    attrib,
    attrs,
    fields,
    fields_dict,
    make_class,
    validate,
)
from ._version_info import VersionInfo


__version__ = "19.3.0"
__version_info__ = VersionInfo._from_version_string(__version__)

__title__ = "attrs"
__description__ = "Classes Without Boilerplate"
__url__ = "https://www.attrs.org/"
__uri__ = __url__
__doc__ = __description__ + " <" + __uri__ + ">"

__author__ = "Hynek Schlawack"
__email__ = "hs@ox.cx"

__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Hynek Schlawack"


s = attributes = attrs
ib = attr = attrib
dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)


__all__ = [
    "Attribute",
    "Factory",
    "NOTHING",
    "asdict",
    "assoc",
    "astuple",
    "attr",
    "attrib",
    "attributes",
    "attrs",
    "converters",
    "evolve",
    "exceptions",
    "fields",
    "fields_dict",
    "filters",
    "get_run_validators",
    "has",
    "ib",
    "make_class",
    "s",
    "set_run_validators",
    "validate",
    "validators",
]
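As a quick illustration of the names re-exported above (including the `s`/`ib` aliases), here is a minimal sketch of an `attrs` class. The `Point` class is hypothetical, and the import path assumes the vendored tree is on `sys.path`.

```
from pipenv.vendor import attr

@attr.s                      # alias for attrs, as defined above
class Point(object):
    x = attr.ib(default=0)   # alias for attrib
    y = attr.ib(default=0)

# attrs generates __init__, __repr__, __eq__, etc. from the declared attributes.
assert Point(1, 2) == Point(x=1, y=2)
print(Point())  # Point(x=0, y=0)
```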
Lib/site-packages/pipenv/vendor/attr/__pycache__/__init__.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/attr/__pycache__/_compat.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/attr/__pycache__/_config.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/attr/__pycache__/_funcs.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/attr/__pycache__/_make.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/attr/__pycache__/_version_info.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/attr/__pycache__/converters.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/attr/__pycache__/exceptions.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/attr/__pycache__/filters.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/attr/__pycache__/validators.cpython-38.pyc (vendored, new binary file, not shown)
Lib/site-packages/pipenv/vendor/attr/_compat.py (vendored, new file, 230 lines)
@@ -0,0 +1,230 @@
from __future__ import absolute_import, division, print_function

import platform
import sys
import types
import warnings


PY2 = sys.version_info[0] == 2
PYPY = platform.python_implementation() == "PyPy"


if PYPY or sys.version_info[:2] >= (3, 6):
    ordered_dict = dict
else:
    from collections import OrderedDict

    ordered_dict = OrderedDict


if PY2:
    from UserDict import IterableUserDict
    from collections import Mapping, Sequence

    # We 'bundle' isclass instead of using inspect as importing inspect is
    # fairly expensive (order of 10-15 ms for a modern machine in 2016)
    def isclass(klass):
        return isinstance(klass, (type, types.ClassType))

    # TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
    TYPE = "type"

    def iteritems(d):
        return d.iteritems()

    # Python 2 is bereft of a read-only dict proxy, so we make one!
    class ReadOnlyDict(IterableUserDict):
        """
        Best-effort read-only dict wrapper.
        """

        def __setitem__(self, key, val):
            # We gently pretend we're a Python 3 mappingproxy.
            raise TypeError(
                "'mappingproxy' object does not support item assignment"
            )

        def update(self, _):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'update'"
            )

        def __delitem__(self, _):
            # We gently pretend we're a Python 3 mappingproxy.
            raise TypeError(
                "'mappingproxy' object does not support item deletion"
            )

        def clear(self):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'clear'"
            )

        def pop(self, key, default=None):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'pop'"
            )

        def popitem(self):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'popitem'"
            )

        def setdefault(self, key, default=None):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'setdefault'"
            )

        def __repr__(self):
            # Override to be identical to the Python 3 version.
            return "mappingproxy(" + repr(self.data) + ")"

    def metadata_proxy(d):
        res = ReadOnlyDict()
        res.data.update(d)  # We blocked update, so we have to do it like this.
        return res

    def just_warn(*args, **kw):  # pragma: nocover
        """
        We only warn on Python 3 because we are not aware of any concrete
        consequences of not setting the cell on Python 2.
        """


else:  # Python 3 and later.
    from collections.abc import Mapping, Sequence  # noqa

    def just_warn(*args, **kw):
        """
        We only warn on Python 3 because we are not aware of any concrete
        consequences of not setting the cell on Python 2.
        """
        warnings.warn(
            "Running interpreter doesn't sufficiently support code object "
            "introspection. Some features like bare super() or accessing "
            "__class__ will not work with slotted classes.",
            RuntimeWarning,
            stacklevel=2,
        )

    def isclass(klass):
        return isinstance(klass, type)

    TYPE = "class"

    def iteritems(d):
        return d.items()

    def metadata_proxy(d):
        return types.MappingProxyType(dict(d))


def make_set_closure_cell():
    """Return a function of two arguments (cell, value) which sets
    the value stored in the closure cell `cell` to `value`.
    """
    # pypy makes this easy. (It also supports the logic below, but
    # why not do the easy/fast thing?)
    if PYPY:  # pragma: no cover

        def set_closure_cell(cell, value):
            cell.__setstate__((value,))

        return set_closure_cell

    # Otherwise gotta do it the hard way.

    # Create a function that will set its first cellvar to `value`.
    def set_first_cellvar_to(value):
        x = value
        return

        # This function will be eliminated as dead code, but
        # not before its reference to `x` forces `x` to be
        # represented as a closure cell rather than a local.
        def force_x_to_be_a_cell():  # pragma: no cover
            return x

    try:
        # Extract the code object and make sure our assumptions about
        # the closure behavior are correct.
        if PY2:
            co = set_first_cellvar_to.func_code
        else:
            co = set_first_cellvar_to.__code__
        if co.co_cellvars != ("x",) or co.co_freevars != ():
            raise AssertionError  # pragma: no cover

        # Convert this code object to a code object that sets the
        # function's first _freevar_ (not cellvar) to the argument.
        if sys.version_info >= (3, 8):
            # CPython 3.8+ has an incompatible CodeType signature
            # (added a posonlyargcount argument) but also added
            # CodeType.replace() to do this without counting parameters.
            set_first_freevar_code = co.replace(
                co_cellvars=co.co_freevars, co_freevars=co.co_cellvars
            )
        else:
            args = [co.co_argcount]
            if not PY2:
                args.append(co.co_kwonlyargcount)
            args.extend(
                [
                    co.co_nlocals,
                    co.co_stacksize,
                    co.co_flags,
                    co.co_code,
                    co.co_consts,
                    co.co_names,
                    co.co_varnames,
                    co.co_filename,
                    co.co_name,
                    co.co_firstlineno,
                    co.co_lnotab,
                    # These two arguments are reversed:
                    co.co_cellvars,
                    co.co_freevars,
                ]
            )
            set_first_freevar_code = types.CodeType(*args)

        def set_closure_cell(cell, value):
            # Create a function using the set_first_freevar_code,
            # whose first closure cell is `cell`. Calling it will
            # change the value of that cell.
            setter = types.FunctionType(
                set_first_freevar_code, {}, "setter", (), (cell,)
            )
            # And call it to set the cell.
            setter(value)

        # Make sure it works on this interpreter:
        def make_func_with_cell():
            x = None

            def func():
                return x  # pragma: no cover

            return func

        if PY2:
            cell = make_func_with_cell().func_closure[0]
        else:
            cell = make_func_with_cell().__closure__[0]
        set_closure_cell(cell, 100)
        if cell.cell_contents != 100:
            raise AssertionError  # pragma: no cover

    except Exception:
        return just_warn
    else:
        return set_closure_cell


set_closure_cell = make_set_closure_cell()
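To make the closure-cell machinery above concrete, here is a small sketch (my own, not part of the commit, assuming the vendored import path) of what `set_closure_cell` does on a supported interpreter: it rewrites the value captured by an already-created closure, mirroring the module's own self-check.

```
from pipenv.vendor.attr._compat import set_closure_cell

def make_func_with_cell():
    x = 1
    def func():
        return x  # closes over x
    return func

f = make_func_with_cell()
assert f() == 1
set_closure_cell(f.__closure__[0], 100)  # overwrite the captured cell
assert f() == 100
```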
Lib/site-packages/pipenv/vendor/attr/_config.py (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
from __future__ import absolute_import, division, print_function


__all__ = ["set_run_validators", "get_run_validators"]

_run_validators = True


def set_run_validators(run):
    """
    Set whether or not validators are run. By default, they are run.
    """
    if not isinstance(run, bool):
        raise TypeError("'run' must be bool.")
    global _run_validators
    _run_validators = run


def get_run_validators():
    """
    Return whether or not validators are run.
    """
    return _run_validators
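A brief sketch of how this toggle is commonly used (the `Probability` class is hypothetical): `attr.set_run_validators`, re-exported from this module, disables validator execution globally, e.g. for hot paths.

```
from pipenv.vendor import attr

@attr.s
class Probability(object):
    value = attr.ib()

    @value.validator
    def _in_range(self, attribute, v):
        if not 0.0 <= v <= 1.0:
            raise ValueError("value must be within [0, 1]")

attr.set_run_validators(False)  # delegates to _config.set_run_validators
Probability(2.0)                # no longer raises
attr.set_run_validators(True)   # restore; Probability(2.0) would raise again
```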
Lib/site-packages/pipenv/vendor/attr/_funcs.py (vendored, new file, 290 lines)
@@ -0,0 +1,290 @@
from __future__ import absolute_import, division, print_function

import copy

from ._compat import iteritems
from ._make import NOTHING, _obj_setattr, fields
from .exceptions import AttrsAttributeNotFoundError


def asdict(
    inst,
    recurse=True,
    filter=None,
    dict_factory=dict,
    retain_collection_types=False,
):
    """
    Return the ``attrs`` attribute values of *inst* as a dict.

    Optionally recurse into other ``attrs``-decorated classes.

    :param inst: Instance of an ``attrs``-decorated class.
    :param bool recurse: Recurse into classes that are also
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``). Is
        called with the `attr.Attribute` as the first argument and the
        value as the second argument.
    :param callable dict_factory: A callable to produce dictionaries from. For
        example, to produce ordered dictionaries instead of normal Python
        dictionaries, pass in ``collections.OrderedDict``.
    :param bool retain_collection_types: Do not convert to ``list`` when
        encountering an attribute whose type is ``tuple`` or ``set``. Only
        meaningful if ``recurse`` is ``True``.

    :rtype: return type of *dict_factory*

    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. versionadded:: 16.0.0 *dict_factory*
    .. versionadded:: 16.1.0 *retain_collection_types*
    """
    attrs = fields(inst.__class__)
    rv = dict_factory()
    for a in attrs:
        v = getattr(inst, a.name)
        if filter is not None and not filter(a, v):
            continue
        if recurse is True:
            if has(v.__class__):
                rv[a.name] = asdict(
                    v, True, filter, dict_factory, retain_collection_types
                )
            elif isinstance(v, (tuple, list, set)):
                cf = v.__class__ if retain_collection_types is True else list
                rv[a.name] = cf(
                    [
                        _asdict_anything(
                            i, filter, dict_factory, retain_collection_types
                        )
                        for i in v
                    ]
                )
            elif isinstance(v, dict):
                df = dict_factory
                rv[a.name] = df(
                    (
                        _asdict_anything(
                            kk, filter, df, retain_collection_types
                        ),
                        _asdict_anything(
                            vv, filter, df, retain_collection_types
                        ),
                    )
                    for kk, vv in iteritems(v)
                )
            else:
                rv[a.name] = v
        else:
            rv[a.name] = v
    return rv


def _asdict_anything(val, filter, dict_factory, retain_collection_types):
    """
    ``asdict`` only works on attrs instances, this works on anything.
    """
    if getattr(val.__class__, "__attrs_attrs__", None) is not None:
        # Attrs class.
        rv = asdict(val, True, filter, dict_factory, retain_collection_types)
    elif isinstance(val, (tuple, list, set)):
        cf = val.__class__ if retain_collection_types is True else list
        rv = cf(
            [
                _asdict_anything(
                    i, filter, dict_factory, retain_collection_types
                )
                for i in val
            ]
        )
    elif isinstance(val, dict):
        df = dict_factory
        rv = df(
            (
                _asdict_anything(kk, filter, df, retain_collection_types),
                _asdict_anything(vv, filter, df, retain_collection_types),
            )
            for kk, vv in iteritems(val)
        )
    else:
        rv = val
    return rv


def astuple(
    inst,
    recurse=True,
    filter=None,
    tuple_factory=tuple,
    retain_collection_types=False,
):
    """
    Return the ``attrs`` attribute values of *inst* as a tuple.

    Optionally recurse into other ``attrs``-decorated classes.

    :param inst: Instance of an ``attrs``-decorated class.
    :param bool recurse: Recurse into classes that are also
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``). Is
        called with the `attr.Attribute` as the first argument and the
        value as the second argument.
    :param callable tuple_factory: A callable to produce tuples from. For
        example, to produce lists instead of tuples.
    :param bool retain_collection_types: Do not convert to ``list``
        or ``dict`` when encountering an attribute whose type is
        ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
        ``True``.

    :rtype: return type of *tuple_factory*

    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. versionadded:: 16.2.0
    """
    attrs = fields(inst.__class__)
    rv = []
    retain = retain_collection_types  # Very long. :/
    for a in attrs:
        v = getattr(inst, a.name)
        if filter is not None and not filter(a, v):
            continue
        if recurse is True:
            if has(v.__class__):
                rv.append(
                    astuple(
                        v,
                        recurse=True,
                        filter=filter,
                        tuple_factory=tuple_factory,
                        retain_collection_types=retain,
                    )
                )
            elif isinstance(v, (tuple, list, set)):
                cf = v.__class__ if retain is True else list
                rv.append(
                    cf(
                        [
                            astuple(
                                j,
                                recurse=True,
                                filter=filter,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(j.__class__)
                            else j
                            for j in v
                        ]
                    )
                )
            elif isinstance(v, dict):
                df = v.__class__ if retain is True else dict
                rv.append(
                    df(
                        (
                            astuple(
                                kk,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(kk.__class__)
                            else kk,
                            astuple(
                                vv,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(vv.__class__)
                            else vv,
                        )
                        for kk, vv in iteritems(v)
                    )
                )
            else:
                rv.append(v)
        else:
            rv.append(v)
    return rv if tuple_factory is list else tuple_factory(rv)


def has(cls):
    """
    Check whether *cls* is a class with ``attrs`` attributes.

    :param type cls: Class to introspect.
    :raise TypeError: If *cls* is not a class.

    :rtype: bool
    """
    return getattr(cls, "__attrs_attrs__", None) is not None


def assoc(inst, **changes):
    """
    Copy *inst* and apply *changes*.

    :param inst: Instance of a class with ``attrs`` attributes.
    :param changes: Keyword changes in the new copy.

    :return: A copy of inst with *changes* incorporated.

    :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
        be found on *cls*.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. deprecated:: 17.1.0
        Use `evolve` instead.
    """
    import warnings

    warnings.warn(
        "assoc is deprecated and will be removed after 2018/01.",
        DeprecationWarning,
        stacklevel=2,
    )
    new = copy.copy(inst)
    attrs = fields(inst.__class__)
    for k, v in iteritems(changes):
        a = getattr(attrs, k, NOTHING)
        if a is NOTHING:
            raise AttrsAttributeNotFoundError(
                "{k} is not an attrs attribute on {cl}.".format(
                    k=k, cl=new.__class__
                )
            )
        _obj_setattr(new, k, v)
    return new


def evolve(inst, **changes):
    """
    Create a new instance, based on *inst* with *changes* applied.

    :param inst: Instance of a class with ``attrs`` attributes.
    :param changes: Keyword changes in the new copy.

    :return: A copy of inst with *changes* incorporated.

    :raise TypeError: If *attr_name* couldn't be found in the class
        ``__init__``.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. versionadded:: 17.1.0
    """
    cls = inst.__class__
    attrs = fields(cls)
    for a in attrs:
        if not a.init:
            continue
        attr_name = a.name  # To deal with private attributes.
        init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
        if init_name not in changes:
            changes[init_name] = getattr(inst, attr_name)
    return cls(**changes)
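For reference, a minimal sketch exercising the helpers defined above (`asdict`, `astuple`, `evolve`) through their public `attr.` re-exports; the `User` class is illustrative only.

```
from pipenv.vendor import attr

@attr.s
class User(object):
    name = attr.ib()
    tags = attr.ib(factory=list)

u = User("alice", ["admin"])
print(attr.asdict(u))            # {'name': 'alice', 'tags': ['admin']}
print(attr.astuple(u))           # ('alice', ['admin'])
v = attr.evolve(u, name="bob")   # new instance with one attribute replaced
print(v.name, u.name)            # bob alice
```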
Lib/site-packages/pipenv/vendor/attr/_make.py (vendored, new file, 2168 lines; diff suppressed because it is too large)
Lib/site-packages/pipenv/vendor/attr/_version_info.py (vendored, new file, 85 lines)
@@ -0,0 +1,85 @@
from __future__ import absolute_import, division, print_function

from functools import total_ordering

from ._funcs import astuple
from ._make import attrib, attrs


@total_ordering
@attrs(eq=False, order=False, slots=True, frozen=True)
class VersionInfo(object):
    """
    A version object that can be compared to a tuple of length 1--4:

    >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
    True
    >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
    True
    >>> vi = attr.VersionInfo(19, 2, 0, "final")
    >>> vi < (19, 1, 1)
    False
    >>> vi < (19,)
    False
    >>> vi == (19, 2,)
    True
    >>> vi == (19, 2, 1)
    False

    .. versionadded:: 19.2
    """

    year = attrib(type=int)
    minor = attrib(type=int)
    micro = attrib(type=int)
    releaselevel = attrib(type=str)

    @classmethod
    def _from_version_string(cls, s):
        """
        Parse *s* and return a _VersionInfo.
        """
        v = s.split(".")
        if len(v) == 3:
            v.append("final")

        return cls(
            year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
        )

    def _ensure_tuple(self, other):
        """
        Ensure *other* is a tuple of a valid length.

        Returns a possibly transformed *other* and ourselves as a tuple of
        the same length as *other*.
        """

        if self.__class__ is other.__class__:
            other = astuple(other)

        if not isinstance(other, tuple):
            raise NotImplementedError

        if not (1 <= len(other) <= 4):
            raise NotImplementedError

        return astuple(self)[: len(other)], other

    def __eq__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            return NotImplemented

        return us == them

    def __lt__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            return NotImplemented

        # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
        # have to do anything special with releaselevel for now.
        return us < them
Lib/site-packages/pipenv/vendor/attr/converters.py (vendored, new file, 78 lines)
@@ -0,0 +1,78 @@
"""
Commonly useful converters.
"""

from __future__ import absolute_import, division, print_function

from ._make import NOTHING, Factory


def optional(converter):
    """
    A converter that allows an attribute to be optional. An optional attribute
    is one which can be set to ``None``.

    :param callable converter: the converter that is used for non-``None``
        values.

    .. versionadded:: 17.1.0
    """

    def optional_converter(val):
        if val is None:
            return None
        return converter(val)

    return optional_converter


def default_if_none(default=NOTHING, factory=None):
    """
    A converter that allows replacing ``None`` values with *default* or the
    result of *factory*.

    :param default: Value to be used if ``None`` is passed. Passing an instance
        of `attr.Factory` is supported, however the ``takes_self`` option
        is *not*.
    :param callable factory: A callable that takes no parameters and whose
        result is used if ``None`` is passed.

    :raises TypeError: If **neither** *default* **nor** *factory* is passed.
    :raises TypeError: If **both** *default* and *factory* are passed.
    :raises ValueError: If an instance of `attr.Factory` is passed with
        ``takes_self=True``.

    .. versionadded:: 18.2.0
    """
    if default is NOTHING and factory is None:
        raise TypeError("Must pass either `default` or `factory`.")

    if default is not NOTHING and factory is not None:
        raise TypeError(
            "Must pass either `default` or `factory` but not both."
        )

    if factory is not None:
        default = Factory(factory)

    if isinstance(default, Factory):
        if default.takes_self:
            raise ValueError(
                "`takes_self` is not supported by default_if_none."
            )

        def default_if_none_converter(val):
            if val is not None:
                return val

            return default.factory()

    else:

        def default_if_none_converter(val):
            if val is not None:
                return val

            return default

    return default_if_none_converter
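A short sketch of both converters together, assuming the vendored `attr` package is importable; the class and field names are illustrative only:

```python
import attr
from attr.converters import default_if_none, optional

@attr.s
class Config(object):
    # None passes through untouched; other values are coerced to int.
    port = attr.ib(converter=optional(int), default=None)
    # None is replaced by a fresh empty list built by the factory.
    tags = attr.ib(converter=default_if_none(factory=list), default=None)

c = Config(port="8080", tags=None)
assert c.port == 8080 and c.tags == []
```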
74
Lib/site-packages/pipenv/vendor/attr/exceptions.py
vendored
Normal file
@@ -0,0 +1,74 @@
from __future__ import absolute_import, division, print_function


class FrozenInstanceError(AttributeError):
    """
    An attempt has been made to modify a frozen/immutable instance.

    It mirrors the behavior of ``namedtuples`` by using the same error message
    and subclassing `AttributeError`.

    .. versionadded:: 16.1.0
    """

    msg = "can't set attribute"
    args = [msg]


class AttrsAttributeNotFoundError(ValueError):
    """
    An ``attrs`` function couldn't find an attribute that the user asked for.

    .. versionadded:: 16.2.0
    """


class NotAnAttrsClassError(ValueError):
    """
    A non-``attrs`` class has been passed into an ``attrs`` function.

    .. versionadded:: 16.2.0
    """


class DefaultAlreadySetError(RuntimeError):
    """
    A default has been set using ``attr.ib()`` and an attempt has been made
    to reset it using the decorator.

    .. versionadded:: 17.1.0
    """


class UnannotatedAttributeError(RuntimeError):
    """
    A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
    annotation.

    .. versionadded:: 17.3.0
    """


class PythonTooOldError(RuntimeError):
    """
    An ``attrs`` feature requiring a more recent Python version has been used.

    .. versionadded:: 18.2.0
    """


class NotCallableError(TypeError):
    """
    An ``attr.ib()`` requiring a callable has been set with a value
    that is not callable.

    .. versionadded:: 19.2.0
    """

    def __init__(self, msg, value):
        super(TypeError, self).__init__(msg, value)
        self.msg = msg
        self.value = value

    def __str__(self):
        return str(self.msg)
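Since `FrozenInstanceError` subclasses `AttributeError`, code written against ``namedtuple`` keeps working unchanged. A minimal sketch, assuming the vendored `attr` package is importable:

```python
import attr
from attr.exceptions import FrozenInstanceError

@attr.s(frozen=True)
class Token(object):
    value = attr.ib()

t = Token("abc")
try:
    t.value = "xyz"  # frozen instances reject attribute assignment
except FrozenInstanceError as e:
    assert isinstance(e, AttributeError)  # mirrors namedtuple behavior
```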
52
Lib/site-packages/pipenv/vendor/attr/filters.py
vendored
Normal file
@@ -0,0 +1,52 @@
"""
Commonly useful filters for `attr.asdict`.
"""

from __future__ import absolute_import, division, print_function

from ._compat import isclass
from ._make import Attribute


def _split_what(what):
    """
    Returns a tuple of `frozenset`s of classes and attributes.
    """
    return (
        frozenset(cls for cls in what if isclass(cls)),
        frozenset(cls for cls in what if isinstance(cls, Attribute)),
    )


def include(*what):
    """
    Whitelist *what*.

    :param what: What to whitelist.
    :type what: `list` of `type` or `attr.Attribute`\\ s

    :rtype: `callable`
    """
    cls, attrs = _split_what(what)

    def include_(attribute, value):
        return value.__class__ in cls or attribute in attrs

    return include_


def exclude(*what):
    """
    Blacklist *what*.

    :param what: What to blacklist.
    :type what: `list` of classes or `attr.Attribute`\\ s.

    :rtype: `callable`
    """
    cls, attrs = _split_what(what)

    def exclude_(attribute, value):
        return value.__class__ not in cls and attribute not in attrs

    return exclude_
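The returned filters plug into the ``filter`` argument of `attr.asdict`. A sketch, assuming the vendored `attr` package is importable; the class is illustrative only:

```python
import attr
from attr.filters import exclude

@attr.s
class User(object):
    name = attr.ib()
    password = attr.ib()

# Leave the password attribute out of the serialized dict.
safe = attr.asdict(
    User("alice", "hunter2"),
    filter=exclude(attr.fields(User).password),
)
assert safe == {"name": "alice"}
```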
378
Lib/site-packages/pipenv/vendor/attr/validators.py
vendored
Normal file
@@ -0,0 +1,378 @@
"""
Commonly useful validators.
"""

from __future__ import absolute_import, division, print_function

import re

from ._make import _AndValidator, and_, attrib, attrs
from .exceptions import NotCallableError


__all__ = [
    "and_",
    "deep_iterable",
    "deep_mapping",
    "in_",
    "instance_of",
    "is_callable",
    "matches_re",
    "optional",
    "provides",
]


@attrs(repr=False, slots=True, hash=True)
class _InstanceOfValidator(object):
    type = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not isinstance(value, self.type):
            raise TypeError(
                "'{name}' must be {type!r} (got {value!r} that is a "
                "{actual!r}).".format(
                    name=attr.name,
                    type=self.type,
                    actual=value.__class__,
                    value=value,
                ),
                attr,
                self.type,
                value,
            )

    def __repr__(self):
        return "<instance_of validator for type {type!r}>".format(
            type=self.type
        )


def instance_of(type):
    """
    A validator that raises a `TypeError` if the initializer is called
    with a wrong type for this particular attribute (checks are performed
    using `isinstance`, therefore it's also valid to pass a tuple of types).

    :param type: The type to check for.
    :type type: type or tuple of types

    :raises TypeError: With a human readable error message, the attribute
        (of type `attr.Attribute`), the expected type, and the value it
        got.
    """
    return _InstanceOfValidator(type)


@attrs(repr=False, frozen=True)
class _MatchesReValidator(object):
    regex = attrib()
    flags = attrib()
    match_func = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not self.match_func(value):
            raise ValueError(
                "'{name}' must match regex {regex!r}"
                " ({value!r} doesn't)".format(
                    name=attr.name, regex=self.regex.pattern, value=value
                ),
                attr,
                self.regex,
                value,
            )

    def __repr__(self):
        return "<matches_re validator for pattern {regex!r}>".format(
            regex=self.regex
        )


def matches_re(regex, flags=0, func=None):
    r"""
    A validator that raises `ValueError` if the initializer is called
    with a string that doesn't match *regex*.

    :param str regex: a regex string to match against
    :param int flags: flags that will be passed to the underlying re function
        (default 0)
    :param callable func: which underlying `re` function to call (options
        are `re.fullmatch`, `re.search`, `re.match`, default
        is ``None`` which means either `re.fullmatch` or an emulation of
        it on Python 2). For performance reasons, they won't be used directly
        but on a pre-`re.compile`\ ed pattern.

    .. versionadded:: 19.2.0
    """
    fullmatch = getattr(re, "fullmatch", None)
    valid_funcs = (fullmatch, None, re.search, re.match)
    if func not in valid_funcs:
        raise ValueError(
            "'func' must be one of %s."
            % (
                ", ".join(
                    sorted(
                        e and e.__name__ or "None" for e in set(valid_funcs)
                    )
                ),
            )
        )

    pattern = re.compile(regex, flags)
    if func is re.match:
        match_func = pattern.match
    elif func is re.search:
        match_func = pattern.search
    else:
        if fullmatch:
            match_func = pattern.fullmatch
        else:
            pattern = re.compile(r"(?:{})\Z".format(regex), flags)
            match_func = pattern.match

    return _MatchesReValidator(pattern, flags, match_func)


@attrs(repr=False, slots=True, hash=True)
class _ProvidesValidator(object):
    interface = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not self.interface.providedBy(value):
            raise TypeError(
                "'{name}' must provide {interface!r} which {value!r} "
                "doesn't.".format(
                    name=attr.name, interface=self.interface, value=value
                ),
                attr,
                self.interface,
                value,
            )

    def __repr__(self):
        return "<provides validator for interface {interface!r}>".format(
            interface=self.interface
        )


def provides(interface):
    """
    A validator that raises a `TypeError` if the initializer is called
    with an object that does not provide the requested *interface* (checks
    are performed using ``interface.providedBy(value)``; see `zope.interface
    <https://zopeinterface.readthedocs.io/en/latest/>`_).

    :param zope.interface.Interface interface: The interface to check for.

    :raises TypeError: With a human readable error message, the attribute
        (of type `attr.Attribute`), the expected interface, and the
        value it got.
    """
    return _ProvidesValidator(interface)


@attrs(repr=False, slots=True, hash=True)
class _OptionalValidator(object):
    validator = attrib()

    def __call__(self, inst, attr, value):
        if value is None:
            return

        self.validator(inst, attr, value)

    def __repr__(self):
        return "<optional validator for {what} or None>".format(
            what=repr(self.validator)
        )


def optional(validator):
    """
    A validator that makes an attribute optional. An optional attribute is one
    which can be set to ``None`` in addition to satisfying the requirements of
    the sub-validator.

    :param validator: A validator (or a list of validators) that is used for
        non-``None`` values.
    :type validator: callable or `list` of callables.

    .. versionadded:: 15.1.0
    .. versionchanged:: 17.1.0 *validator* can be a list of validators.
    """
    if isinstance(validator, list):
        return _OptionalValidator(_AndValidator(validator))
    return _OptionalValidator(validator)


@attrs(repr=False, slots=True, hash=True)
class _InValidator(object):
    options = attrib()

    def __call__(self, inst, attr, value):
        try:
            in_options = value in self.options
        except TypeError:  # e.g. `1 in "abc"`
            in_options = False

        if not in_options:
            raise ValueError(
                "'{name}' must be in {options!r} (got {value!r})".format(
                    name=attr.name, options=self.options, value=value
                )
            )

    def __repr__(self):
        return "<in_ validator with options {options!r}>".format(
            options=self.options
        )


def in_(options):
    """
    A validator that raises a `ValueError` if the initializer is called
    with a value that does not belong in the options provided. The check is
    performed using ``value in options``.

    :param options: Allowed options.
    :type options: list, tuple, `enum.Enum`, ...

    :raises ValueError: With a human readable error message, the attribute (of
        type `attr.Attribute`), the expected options, and the value it
        got.

    .. versionadded:: 17.1.0
    """
    return _InValidator(options)


@attrs(repr=False, slots=False, hash=True)
class _IsCallableValidator(object):
    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not callable(value):
            message = (
                "'{name}' must be callable "
                "(got {value!r} that is a {actual!r})."
            )
            raise NotCallableError(
                msg=message.format(
                    name=attr.name, value=value, actual=value.__class__
                ),
                value=value,
            )

    def __repr__(self):
        return "<is_callable validator>"


def is_callable():
    """
    A validator that raises an `attr.exceptions.NotCallableError` if the
    initializer is called with a value for this particular attribute
    that is not callable.

    .. versionadded:: 19.1.0

    :raises `attr.exceptions.NotCallableError`: With a human readable error
        message containing the attribute (`attr.Attribute`) name,
        and the value it got.
    """
    return _IsCallableValidator()


@attrs(repr=False, slots=True, hash=True)
class _DeepIterable(object):
    member_validator = attrib(validator=is_callable())
    iterable_validator = attrib(
        default=None, validator=optional(is_callable())
    )

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if self.iterable_validator is not None:
            self.iterable_validator(inst, attr, value)

        for member in value:
            self.member_validator(inst, attr, member)

    def __repr__(self):
        iterable_identifier = (
            ""
            if self.iterable_validator is None
            else " {iterable!r}".format(iterable=self.iterable_validator)
        )
        return (
            "<deep_iterable validator for{iterable_identifier}"
            " iterables of {member!r}>"
        ).format(
            iterable_identifier=iterable_identifier,
            member=self.member_validator,
        )


def deep_iterable(member_validator, iterable_validator=None):
    """
    A validator that performs deep validation of an iterable.

    :param member_validator: Validator to apply to iterable members
    :param iterable_validator: Validator to apply to iterable itself
        (optional)

    .. versionadded:: 19.1.0

    :raises TypeError: if any sub-validators fail
    """
    return _DeepIterable(member_validator, iterable_validator)


@attrs(repr=False, slots=True, hash=True)
class _DeepMapping(object):
    key_validator = attrib(validator=is_callable())
    value_validator = attrib(validator=is_callable())
    mapping_validator = attrib(default=None, validator=optional(is_callable()))

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if self.mapping_validator is not None:
            self.mapping_validator(inst, attr, value)

        for key in value:
            self.key_validator(inst, attr, key)
            self.value_validator(inst, attr, value[key])

    def __repr__(self):
        return (
            "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
        ).format(key=self.key_validator, value=self.value_validator)


def deep_mapping(key_validator, value_validator, mapping_validator=None):
    """
    A validator that performs deep validation of a dictionary.

    :param key_validator: Validator to apply to dictionary keys
    :param value_validator: Validator to apply to dictionary values
    :param mapping_validator: Validator to apply to top-level mapping
        attribute (optional)

    .. versionadded:: 19.1.0

    :raises TypeError: if any sub-validators fail
    """
    return _DeepMapping(key_validator, value_validator, mapping_validator)
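A sketch combining several of the validators above on one class, assuming the vendored `attr` package is importable; the class and field names are illustrative only:

```python
import attr
from attr.validators import (
    deep_iterable, in_, instance_of, matches_re, optional,
)

@attr.s
class Job(object):
    name = attr.ib(validator=matches_re(r"[a-z][a-z0-9_]*"))
    priority = attr.ib(validator=in_(range(10)))
    tags = attr.ib(
        default=(),
        validator=deep_iterable(
            member_validator=instance_of(str),
            iterable_validator=instance_of(tuple),
        ),
    )
    owner = attr.ib(default=None, validator=optional(instance_of(str)))

Job("nightly_sync", priority=3, tags=("cron", "io"))  # validates cleanly
# Job("Nightly", priority=3) would raise ValueError from matches_re
```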
5
Lib/site-packages/pipenv/vendor/backports/__init__.py
vendored
Normal file
@@ -0,0 +1,5 @@
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
from . import shutil_get_terminal_size
from . import weakref
from . import enum
from . import functools_lru_cache
BIN
Lib/site-packages/pipenv/vendor/backports/__pycache__/__init__.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/backports/__pycache__/functools_lru_cache.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/backports/__pycache__/weakref.cpython-38.pyc
vendored
Normal file
Binary file not shown.
32
Lib/site-packages/pipenv/vendor/backports/enum/LICENSE
vendored
Normal file
@@ -0,0 +1,32 @@
Copyright (c) 2013, Ethan Furman.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:

    Redistributions of source code must retain the above
    copyright notice, this list of conditions and the
    following disclaimer.

    Redistributions in binary form must reproduce the above
    copyright notice, this list of conditions and the following
    disclaimer in the documentation and/or other materials
    provided with the distribution.

    Neither the name Ethan Furman nor the names of any
    contributors may be used to endorse or promote products
    derived from this software without specific prior written
    permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
3
Lib/site-packages/pipenv/vendor/backports/enum/README
vendored
Normal file
@@ -0,0 +1,3 @@
enum34 is the new Python stdlib enum module available in Python 3.4
backported for previous versions of Python from 2.4 to 3.3.
tested on 2.6, 2.7, and 3.3+
838
Lib/site-packages/pipenv/vendor/backports/enum/__init__.py
vendored
Normal file
@@ -0,0 +1,838 @@
"""Python Enumerations"""

import sys as _sys

__all__ = ['Enum', 'IntEnum', 'unique']

version = 1, 1, 10

pyver = float('%s.%s' % _sys.version_info[:2])

try:
    any
except NameError:
    def any(iterable):
        for element in iterable:
            if element:
                return True
        return False

try:
    from collections import OrderedDict
except ImportError:
    OrderedDict = None

try:
    basestring
except NameError:
    # In Python 2 basestring is the ancestor of both str and unicode
    # in Python 3 it's just str, but was missing in 3.1
    basestring = str

try:
    unicode
except NameError:
    # In Python 3 unicode no longer exists (it's just str)
    unicode = str

class _RouteClassAttributeToGetattr(object):
    """Route attribute access on a class to __getattr__.

    This is a descriptor, used to define attributes that act differently when
    accessed through an instance and through a class.  Instance access remains
    normal, but access to an attribute through a class will be routed to the
    class's __getattr__ method; this is done by raising AttributeError.

    """
    def __init__(self, fget=None):
        self.fget = fget

    def __get__(self, instance, ownerclass=None):
        if instance is None:
            raise AttributeError()
        return self.fget(instance)

    def __set__(self, instance, value):
        raise AttributeError("can't set attribute")

    def __delete__(self, instance):
        raise AttributeError("can't delete attribute")


def _is_descriptor(obj):
    """Returns True if obj is a descriptor, False otherwise."""
    return (
            hasattr(obj, '__get__') or
            hasattr(obj, '__set__') or
            hasattr(obj, '__delete__'))


def _is_dunder(name):
    """Returns True if a __dunder__ name, False otherwise."""
    return (name[:2] == name[-2:] == '__' and
            name[2:3] != '_' and
            name[-3:-2] != '_' and
            len(name) > 4)


def _is_sunder(name):
    """Returns True if a _sunder_ name, False otherwise."""
    return (name[0] == name[-1] == '_' and
            name[1:2] != '_' and
            name[-2:-1] != '_' and
            len(name) > 2)


def _make_class_unpicklable(cls):
    """Make the given class un-picklable."""
    def _break_on_call_reduce(self, protocol=None):
        raise TypeError('%r cannot be pickled' % self)
    cls.__reduce_ex__ = _break_on_call_reduce
    cls.__module__ = '<unknown>'


class _EnumDict(dict):
    """Track enum member order and ensure member names are not reused.

    EnumMeta will use the names found in self._member_names as the
    enumeration member names.

    """
    def __init__(self):
        super(_EnumDict, self).__init__()
        self._member_names = []

    def __setitem__(self, key, value):
        """Changes anything not dundered or not a descriptor.

        If a descriptor is added with the same name as an enum member, the
        name is removed from _member_names (this may leave a hole in the
        numerical sequence of values).

        If an enum member name is used twice, an error is raised; duplicate
        values are not checked for.

        Single underscore (sunder) names are reserved.

        Note: in 3.x __order__ is simply discarded as an unnecessary
        leftover from 2.x

        """
        if pyver >= 3.0 and key in ('_order_', '__order__'):
            return
        elif key == '__order__':
            key = '_order_'
        if _is_sunder(key):
            if key != '_order_':
                raise ValueError('_names_ are reserved for future Enum use')
        elif _is_dunder(key):
            pass
        elif key in self._member_names:
            # descriptor overwriting an enum?
            raise TypeError('Attempted to reuse key: %r' % key)
        elif not _is_descriptor(value):
            if key in self:
                # enum overwriting a descriptor?
                raise TypeError('Key already defined as: %r' % self[key])
            self._member_names.append(key)
        super(_EnumDict, self).__setitem__(key, value)


# Dummy value for Enum as EnumMeta explicitly checks for it, but of course
# until EnumMeta finishes running the first time the Enum class doesn't exist.
# This is also why there are checks in EnumMeta like `if Enum is not None`
Enum = None


class EnumMeta(type):
    """Metaclass for Enum"""
    @classmethod
    def __prepare__(metacls, cls, bases):
        return _EnumDict()

    def __new__(metacls, cls, bases, classdict):
        # an Enum class is final once enumeration items have been defined; it
        # cannot be mixed with other types (int, float, etc.) if it has an
        # inherited __new__ unless a new __new__ is defined (or the resulting
        # class will fail).
        if type(classdict) is dict:
            original_dict = classdict
            classdict = _EnumDict()
            for k, v in original_dict.items():
                classdict[k] = v

        member_type, first_enum = metacls._get_mixins_(bases)
        __new__, save_new, use_args = metacls._find_new_(classdict, member_type,
                                                         first_enum)
        # save enum items into separate mapping so they don't get baked into
        # the new class
        members = dict((k, classdict[k]) for k in classdict._member_names)
        for name in classdict._member_names:
            del classdict[name]

        # py2 support for definition order
        _order_ = classdict.get('_order_')
        if _order_ is None:
            if pyver < 3.0:
                try:
                    _order_ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])]
                except TypeError:
                    _order_ = [name for name in sorted(members.keys())]
            else:
                _order_ = classdict._member_names
        else:
            del classdict['_order_']
            if pyver < 3.0:
                if isinstance(_order_, basestring):
                    _order_ = _order_.replace(',', ' ').split()
                aliases = [name for name in members if name not in _order_]
                _order_ += aliases

        # check for illegal enum names (any others?)
        invalid_names = set(members) & set(['mro'])
        if invalid_names:
            raise ValueError('Invalid enum member name(s): %s' % (
                ', '.join(invalid_names), ))

        # save attributes from super classes so we know if we can take
        # the shortcut of storing members in the class dict
        base_attributes = set([a for b in bases for a in b.__dict__])
        # create our new Enum type
        enum_class = super(EnumMeta, metacls).__new__(metacls, cls, bases, classdict)
        enum_class._member_names_ = []  # names in random order
        if OrderedDict is not None:
            enum_class._member_map_ = OrderedDict()
        else:
            enum_class._member_map_ = {}  # name->value map
        enum_class._member_type_ = member_type

        # Reverse value->name map for hashable values.
        enum_class._value2member_map_ = {}

        # instantiate them, checking for duplicates as we go
        # we instantiate first instead of checking for duplicates first in case
        # a custom __new__ is doing something funky with the values -- such as
        # auto-numbering ;)
        if __new__ is None:
            __new__ = enum_class.__new__
        for member_name in _order_:
            value = members[member_name]
            if not isinstance(value, tuple):
                args = (value, )
            else:
                args = value
            if member_type is tuple:   # special case for tuple enums
                args = (args, )        # wrap it one more time
            if not use_args or not args:
                enum_member = __new__(enum_class)
                if not hasattr(enum_member, '_value_'):
                    enum_member._value_ = value
            else:
                enum_member = __new__(enum_class, *args)
                if not hasattr(enum_member, '_value_'):
                    enum_member._value_ = member_type(*args)
            value = enum_member._value_
            enum_member._name_ = member_name
            enum_member.__objclass__ = enum_class
            enum_member.__init__(*args)
            # If another member with the same value was already defined, the
            # new member becomes an alias to the existing one.
            for name, canonical_member in enum_class._member_map_.items():
                if canonical_member.value == enum_member._value_:
                    enum_member = canonical_member
                    break
            else:
                # Aliases don't appear in member names (only in __members__).
                enum_class._member_names_.append(member_name)
            # performance boost for any member that would not shadow
            # a DynamicClassAttribute (aka _RouteClassAttributeToGetattr)
            if member_name not in base_attributes:
                setattr(enum_class, member_name, enum_member)
            # now add to _member_map_
            enum_class._member_map_[member_name] = enum_member
            try:
                # This may fail if value is not hashable. We can't add the
                # value to the map, and by-value lookups for this value will
                # be linear.
                enum_class._value2member_map_[value] = enum_member
            except TypeError:
                pass

        # If a custom type is mixed into the Enum, and it does not know how
        # to pickle itself, pickle.dumps will succeed but pickle.loads will
        # fail.  Rather than have the error show up later and possibly far
        # from the source, sabotage the pickle protocol for this class so
        # that pickle.dumps also fails.
        #
        # However, if the new class implements its own __reduce_ex__, do not
        # sabotage -- it's on them to make sure it works correctly.  We use
        # __reduce_ex__ instead of any of the others as it is preferred by
        # pickle over __reduce__, and it handles all pickle protocols.
        unpicklable = False
        if '__reduce_ex__' not in classdict:
            if member_type is not object:
                methods = ('__getnewargs_ex__', '__getnewargs__',
                           '__reduce_ex__', '__reduce__')
                if not any(m in member_type.__dict__ for m in methods):
                    _make_class_unpicklable(enum_class)
                    unpicklable = True

        # double check that repr and friends are not the mixin's or various
        # things break (such as pickle)
        for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
            class_method = getattr(enum_class, name)
            obj_method = getattr(member_type, name, None)
            enum_method = getattr(first_enum, name, None)
            if name not in classdict and class_method is not enum_method:
                if name == '__reduce_ex__' and unpicklable:
                    continue
                setattr(enum_class, name, enum_method)

        # method resolution and int's are not playing nice
        # Pythons older than 2.6 use __cmp__
        if pyver < 2.6:

            if issubclass(enum_class, int):
                setattr(enum_class, '__cmp__', getattr(int, '__cmp__'))

        elif pyver < 3.0:

            if issubclass(enum_class, int):
                for method in (
                        '__le__',
                        '__lt__',
                        '__gt__',
                        '__ge__',
                        '__eq__',
                        '__ne__',
                        '__hash__',
                        ):
                    setattr(enum_class, method, getattr(int, method))

        # replace any other __new__ with our own (as long as Enum is not None,
        # anyway) -- again, this is to support pickle
        if Enum is not None:
            # if the user defined their own __new__, save it before it gets
            # clobbered in case they subclass later
            if save_new:
                setattr(enum_class, '__member_new__', enum_class.__dict__['__new__'])
            setattr(enum_class, '__new__', Enum.__dict__['__new__'])
        return enum_class

    def __bool__(cls):
        """
        classes/types should always be True.
        """
        return True

    def __call__(cls, value, names=None, module=None, type=None, start=1):
        """Either returns an existing member, or creates a new enum class.

        This method is used both when an enum class is given a value to match
        to an enumeration member (i.e. Color(3)) and for the functional API
        (i.e. Color = Enum('Color', names='red green blue')).

        When used for the functional API: `module`, if set, will be stored in
        the new class' __module__ attribute; `type`, if set, will be mixed in
        as the first base class.

        Note: if `module` is not set this routine will attempt to discover the
        calling module by walking the frame stack; if this is unsuccessful
        the resulting class will not be pickleable.

        """
        if names is None:  # simple value lookup
            return cls.__new__(cls, value)
        # otherwise, functional API: we're creating a new Enum type
        return cls._create_(value, names, module=module, type=type, start=start)

    def __contains__(cls, member):
        return isinstance(member, cls) and member.name in cls._member_map_

    def __delattr__(cls, attr):
        # nicer error message when someone tries to delete an attribute
        # (see issue19025).
        if attr in cls._member_map_:
            raise AttributeError(
                    "%s: cannot delete Enum member." % cls.__name__)
        super(EnumMeta, cls).__delattr__(attr)

    def __dir__(self):
        return (['__class__', '__doc__', '__members__', '__module__'] +
                self._member_names_)

    @property
    def __members__(cls):
        """Returns a mapping of member name->value.

        This mapping lists all enum members, including aliases.  Note that
        this is a copy of the internal mapping.

        """
        return cls._member_map_.copy()

    def __getattr__(cls, name):
        """Return the enum member matching `name`

        We use __getattr__ instead of descriptors or inserting into the enum
        class' __dict__ in order to support `name` and `value` being both
        properties for enum members (which live in the class' __dict__) and
        enum members themselves.

        """
        if _is_dunder(name):
            raise AttributeError(name)
        try:
            return cls._member_map_[name]
        except KeyError:
            raise AttributeError(name)

    def __getitem__(cls, name):
        return cls._member_map_[name]

    def __iter__(cls):
        return (cls._member_map_[name] for name in cls._member_names_)

    def __reversed__(cls):
        return (cls._member_map_[name] for name in reversed(cls._member_names_))

    def __len__(cls):
        return len(cls._member_names_)

    __nonzero__ = __bool__

    def __repr__(cls):
        return "<enum %r>" % cls.__name__

    def __setattr__(cls, name, value):
        """Block attempts to reassign Enum members.

        A simple assignment to the class namespace only changes one of the
        several possible ways to get an Enum member from the Enum class,
        resulting in an inconsistent Enumeration.

        """
        member_map = cls.__dict__.get('_member_map_', {})
        if name in member_map:
            raise AttributeError('Cannot reassign members.')
        super(EnumMeta, cls).__setattr__(name, value)

    def _create_(cls, class_name, names=None, module=None, type=None, start=1):
        """Convenience method to create a new Enum class.

        `names` can be:

        * A string containing member names, separated either with spaces or
          commas.  Values are auto-numbered from 1.
        * An iterable of member names.  Values are auto-numbered from 1.
        * An iterable of (member name, value) pairs.
        * A mapping of member name -> value.

        """
        if pyver < 3.0:
            # if class_name is unicode, attempt a conversion to ASCII
            if isinstance(class_name, unicode):
                try:
                    class_name = class_name.encode('ascii')
                except UnicodeEncodeError:
                    raise TypeError('%r is not representable in ASCII' % class_name)
        metacls = cls.__class__
        if type is None:
            bases = (cls, )
        else:
            bases = (type, cls)
        classdict = metacls.__prepare__(class_name, bases)
        _order_ = []

        # special processing needed for names?
        if isinstance(names, basestring):
            names = names.replace(',', ' ').split()
        if isinstance(names, (tuple, list)) and isinstance(names[0], basestring):
            names = [(e, i+start) for (i, e) in enumerate(names)]

        # Here, names is either an iterable of (name, value) or a mapping.
        item = None  # in case names is empty
        for item in names:
            if isinstance(item, basestring):
                member_name, member_value = item, names[item]
            else:
                member_name, member_value = item
            classdict[member_name] = member_value
            _order_.append(member_name)
        # only set _order_ in classdict if name/value was not from a mapping
        if not isinstance(item, basestring):
            classdict['_order_'] = _order_
        enum_class = metacls.__new__(metacls, class_name, bases, classdict)

        # TODO: replace the frame hack if a blessed way to know the calling
        # module is ever developed
        if module is None:
            try:
                module = _sys._getframe(2).f_globals['__name__']
            except (AttributeError, ValueError):
                pass
        if module is None:
            _make_class_unpicklable(enum_class)
        else:
            enum_class.__module__ = module

        return enum_class

    @staticmethod
    def _get_mixins_(bases):
        """Returns the type for creating enum members, and the first inherited
        enum class.

        bases: the tuple of bases that was given to __new__

        """
        if not bases or Enum is None:
            return object, Enum

        # double check that we are not subclassing a class with existing
        # enumeration members; while we're at it, see if any other data
        # type has been mixed in so we can use the correct __new__
        member_type = first_enum = None
        for base in bases:
            if (base is not Enum and
                    issubclass(base, Enum) and
                    base._member_names_):
                raise TypeError("Cannot extend enumerations")
        # base is now the last base in bases
        if not issubclass(base, Enum):
            raise TypeError("new enumerations must be created as "
                            "`ClassName([mixin_type,] enum_type)`")

        # get correct mix-in type (either mix-in type of Enum subclass, or
        # first base if last base is Enum)
        if not issubclass(bases[0], Enum):
            member_type = bases[0]  # first data type
            first_enum = bases[-1]  # enum type
        else:
            for base in bases[0].__mro__:
                # most common: (IntEnum, int, Enum, object)
                # possible:    (<Enum 'AutoIntEnum'>, <Enum 'IntEnum'>,
                #               <class 'int'>, <Enum 'Enum'>,
                #               <class 'object'>)
                if issubclass(base, Enum):
                    if first_enum is None:
                        first_enum = base
                else:
                    if member_type is None:
                        member_type = base

        return member_type, first_enum

    if pyver < 3.0:
        @staticmethod
        def _find_new_(classdict, member_type, first_enum):
            """Returns the __new__ to be used for creating the enum members.

            classdict: the class dictionary given to __new__
            member_type: the data type whose __new__ will be used by default
            first_enum: enumeration to check for an overriding __new__

            """
            # now find the correct __new__, checking to see if one was defined
            # by the user; also check earlier enum classes in case a __new__
            # was saved as __member_new__
            __new__ = classdict.get('__new__', None)
            if __new__:
                return None, True, True  # __new__, save_new, use_args

            N__new__ = getattr(None, '__new__')
            O__new__ = getattr(object, '__new__')
            if Enum is None:
                E__new__ = N__new__
            else:
                E__new__ = Enum.__dict__['__new__']
            # check all possibles for __member_new__ before falling back to
            # __new__
            for method in ('__member_new__', '__new__'):
                for possible in (member_type, first_enum):
                    try:
                        target = possible.__dict__[method]
                    except (AttributeError, KeyError):
                        target = getattr(possible, method, None)
                    if target not in [
                            None,
                            N__new__,
                            O__new__,
                            E__new__,
                            ]:
                        if method == '__member_new__':
                            classdict['__new__'] = target
                            return None, False, True
                        if isinstance(target, staticmethod):
                            target = target.__get__(member_type)
                        __new__ = target
                        break
                if __new__ is not None:
                    break
            else:
                __new__ = object.__new__

            # if a non-object.__new__ is used then whatever value/tuple was
            # assigned to the enum member name will be passed to __new__ and
            # to the new enum member's __init__
            if __new__ is object.__new__:
                use_args = False
            else:
                use_args = True

            return __new__, False, use_args
    else:
        @staticmethod
        def _find_new_(classdict, member_type, first_enum):
            """Returns the __new__ to be used for creating the enum members.

            classdict: the class dictionary given to __new__
            member_type: the data type whose __new__ will be used by default
            first_enum: enumeration to check for an overriding __new__

            """
            # now find the correct __new__, checking to see if one was defined
            # by the user; also check earlier enum classes in case a __new__
            # was saved as __member_new__
            __new__ = classdict.get('__new__', None)

            # should __new__ be saved as __member_new__ later?
            save_new = __new__ is not None

            if __new__ is None:
                # check all possibles for __member_new__ before falling back
                # to __new__
                for method in ('__member_new__', '__new__'):
                    for possible in (member_type, first_enum):
                        target = getattr(possible, method, None)
                        if target not in (
                                None,
                                None.__new__,
                                object.__new__,
                                Enum.__new__,
                                ):
                            __new__ = target
                            break
                    if __new__ is not None:
                        break
                else:
                    __new__ = object.__new__

            # if a non-object.__new__ is used then whatever value/tuple was
            # assigned to the enum member name will be passed to __new__ and
            # to the new enum member's __init__
            if __new__ is object.__new__:
                use_args = False
            else:
                use_args = True

            return __new__, save_new, use_args


########################################################
# In order to support Python 2 and 3 with a single
# codebase we have to create the Enum methods separately
# and then use the `type(name, bases, dict)` method to
# create the class.
########################################################
temp_enum_dict = {}
temp_enum_dict['__doc__'] = "Generic enumeration.\n\n    Derive from this class to define new enumerations.\n\n"

def __new__(cls, value):
    # all enum instances are actually created during class construction
    # without calling this method; this method is called by the metaclass'
    # __call__ (i.e. Color(3) ), and by pickle
    if type(value) is cls:
        # For lookups like Color(Color.red)
        value = value.value
        #return value
    # by-value search for a matching enum member
    # see if it's in the reverse mapping (for hashable values)
    try:
        if value in cls._value2member_map_:
            return cls._value2member_map_[value]
    except TypeError:
        # not there, now do long search -- O(n) behavior
        for member in cls._member_map_.values():
            if member.value == value:
                return member
    raise ValueError("%s is not a valid %s" % (value, cls.__name__))
temp_enum_dict['__new__'] = __new__
del __new__

def __repr__(self):
    return "<%s.%s: %r>" % (
            self.__class__.__name__, self._name_, self._value_)
temp_enum_dict['__repr__'] = __repr__
del __repr__

def __str__(self):
    return "%s.%s" % (self.__class__.__name__, self._name_)
temp_enum_dict['__str__'] = __str__
del __str__

if pyver >= 3.0:
    def __dir__(self):
        added_behavior = [
                m
                for cls in self.__class__.mro()
                for m in cls.__dict__
                if m[0] != '_' and m not in self._member_map_
                ]
        return (['__class__', '__doc__', '__module__', ] + added_behavior)
    temp_enum_dict['__dir__'] = __dir__
    del __dir__

def __format__(self, format_spec):
    # mixed-in Enums should use the mixed-in type's __format__, otherwise
    # we can get strange results with the Enum name showing up instead of
    # the value

    # pure Enum branch
    if self._member_type_ is object:
        cls = str
        val = str(self)
    # mix-in branch
    else:
        cls = self._member_type_
        val = self.value
    return cls.__format__(val, format_spec)
temp_enum_dict['__format__'] = __format__
del __format__


####################################
# Pythons older than 2.6 use __cmp__

if pyver < 2.6:

    def __cmp__(self, other):
        if type(other) is self.__class__:
            if self is other:
                return 0
            return -1
        return NotImplemented
        raise TypeError("unorderable types: %s() and %s()" % (self.__class__.__name__, other.__class__.__name__))
    temp_enum_dict['__cmp__'] = __cmp__
    del __cmp__

else:

    def __le__(self, other):
        raise TypeError("unorderable types: %s() <= %s()" % (self.__class__.__name__, other.__class__.__name__))
    temp_enum_dict['__le__'] = __le__
    del __le__

    def __lt__(self, other):
        raise TypeError("unorderable types: %s() < %s()" % (self.__class__.__name__, other.__class__.__name__))
    temp_enum_dict['__lt__'] = __lt__
    del __lt__

    def __ge__(self, other):
        raise TypeError("unorderable types: %s() >= %s()" % (self.__class__.__name__, other.__class__.__name__))
    temp_enum_dict['__ge__'] = __ge__
    del __ge__

    def __gt__(self, other):
        raise TypeError("unorderable types: %s() > %s()" % (self.__class__.__name__, other.__class__.__name__))
    temp_enum_dict['__gt__'] = __gt__
    del __gt__


def __eq__(self, other):
    if type(other) is self.__class__:
        return self is other
    return NotImplemented
temp_enum_dict['__eq__'] = __eq__
del __eq__

def __ne__(self, other):
    if type(other) is self.__class__:
        return self is not other
    return NotImplemented
temp_enum_dict['__ne__'] = __ne__
del __ne__

def __hash__(self):
    return hash(self._name_)
temp_enum_dict['__hash__'] = __hash__
del __hash__

def __reduce_ex__(self, proto):
    return self.__class__, (self._value_, )
temp_enum_dict['__reduce_ex__'] = __reduce_ex__
del __reduce_ex__

# _RouteClassAttributeToGetattr is used to provide access to the `name`
# and `value` properties of enum members while keeping some measure of
# protection from modification, while still allowing for an enumeration
# to have members named `name` and `value`.  This works because enumeration
# members are not set directly on the enum class -- __getattr__ is
# used to look them up.

@_RouteClassAttributeToGetattr
def name(self):
    return self._name_
temp_enum_dict['name'] = name
del name

@_RouteClassAttributeToGetattr
def value(self):
    return self._value_
temp_enum_dict['value'] = value
del value

@classmethod
def _convert(cls, name, module, filter, source=None):
    """
    Create a new Enum subclass that replaces a collection of global constants
    """
    # convert all constants from source (or module) that pass filter() to
    # a new Enum called name, and export the enum and its members back to
    # module;
    # also, replace the __reduce_ex__ method so unpickling works in
    # previous Python versions
    module_globals = vars(_sys.modules[module])
    if source:
        source = vars(source)
    else:
        source = module_globals
    members = dict((name, value) for name, value in source.items() if filter(name))
    cls = cls(name, members, module=module)
    cls.__reduce_ex__ = _reduce_ex_by_name
    module_globals.update(cls.__members__)
    module_globals[name] = cls
    return cls
temp_enum_dict['_convert'] = _convert
del _convert

Enum = EnumMeta('Enum', (object, ), temp_enum_dict)
del temp_enum_dict

# Enum has now been created
###########################

class IntEnum(int, Enum):
    """Enum where members are also (and must be) ints"""


def _reduce_ex_by_name(self, proto):
    return self.name


def unique(enumeration):
    """Class decorator that ensures only unique members exist in an enumeration."""
    duplicates = []
    for name, member in enumeration.__members__.items():
        if name != member.name:
            duplicates.append((name, member.name))
    if duplicates:
        duplicate_names = ', '.join(
                ["%s -> %s" % (alias, name) for (alias, name) in duplicates]
                )
        raise ValueError('duplicate names found in %r: %s' %
                (enumeration, duplicate_names)
                )
    return enumeration
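A sketch of the backport in use; it mirrors the Python 3.4 stdlib `enum`, so this is illustrative only (assuming the package is importable as `backports.enum`):

```python
from backports.enum import Enum, IntEnum, unique

@unique
class Color(Enum):
    red = 1
    green = 2
    blue = 3

assert Color(2) is Color.green    # by-value lookup via EnumMeta.__call__
assert Color['blue'].value == 3   # by-name lookup via __getitem__

# Functional API: string names are auto-numbered starting at `start`.
Animal = Enum('Animal', 'cat dog', start=1)
assert Animal.dog.value == 2

class Priority(IntEnum):          # members are real ints
    low = 1
    high = 2

assert Priority.high > Priority.low
```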
BIN
Lib/site-packages/pipenv/vendor/backports/enum/__pycache__/__init__.cpython-38.pyc
vendored
Normal file
Binary file not shown.
7
Lib/site-packages/pipenv/vendor/backports/functools_lru_cache.LICENSE
vendored
Normal file
@@ -0,0 +1,7 @@
Copyright Jason R. Coombs

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
196
Lib/site-packages/pipenv/vendor/backports/functools_lru_cache.py
vendored
Normal file
@@ -0,0 +1,196 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import functools
|
||||
from collections import namedtuple
|
||||
from threading import RLock
|
||||
|
||||
_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
|
||||
|
||||
|
||||
@functools.wraps(functools.update_wrapper)
|
def update_wrapper(
    wrapper,
    wrapped,
    assigned=functools.WRAPPER_ASSIGNMENTS,
    updated=functools.WRAPPER_UPDATES,
):
    """
    Patch two bugs in functools.update_wrapper.
    """
    # workaround for http://bugs.python.org/issue3445
    assigned = tuple(attr for attr in assigned if hasattr(wrapped, attr))
    wrapper = functools.update_wrapper(wrapper, wrapped, assigned, updated)
    # workaround for https://bugs.python.org/issue17482
    wrapper.__wrapped__ = wrapped
    return wrapper


class _HashedSeq(list):
    __slots__ = 'hashvalue'

    def __init__(self, tup, hash=hash):
        self[:] = tup
        self.hashvalue = hash(tup)

    def __hash__(self):
        return self.hashvalue


def _make_key(
    args,
    kwds,
    typed,
    kwd_mark=(object(),),
    fasttypes=set([int, str, frozenset, type(None)]),
    sorted=sorted,
    tuple=tuple,
    type=type,
    len=len,
):
    'Make a cache key from optionally typed positional and keyword arguments'
    key = args
    if kwds:
        sorted_items = sorted(kwds.items())
        key += kwd_mark
        for item in sorted_items:
            key += item
    if typed:
        key += tuple(type(v) for v in args)
        if kwds:
            key += tuple(type(v) for k, v in sorted_items)
    elif len(key) == 1 and type(key[0]) in fasttypes:
        return key[0]
    return _HashedSeq(key)


def lru_cache(maxsize=100, typed=False):
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.

    If *typed* is True, arguments of different types will be cached separately.
    For example, f(3.0) and f(3) will be treated as distinct calls with
    distinct results.

    Arguments to the cached function must be hashable.

    View the cache statistics named tuple (hits, misses, maxsize, currsize) with
    f.cache_info(). Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.

    See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used

    """

    # Users should only access the lru_cache through its public API:
    #   cache_info, cache_clear, and f.__wrapped__
    # The internals of the lru_cache are encapsulated for thread safety and
    # to allow the implementation to change (including a possible C version).

    def decorating_function(user_function):

        cache = dict()
        stats = [0, 0]  # make statistics updateable non-locally
        HITS, MISSES = 0, 1  # names for the stats fields
        make_key = _make_key
        cache_get = cache.get  # bound method to lookup key or return None
        _len = len  # localize the global len() function
        lock = RLock()  # because linkedlist updates aren't threadsafe
        root = []  # root of the circular doubly linked list
        root[:] = [root, root, None, None]  # initialize by pointing to self
        nonlocal_root = [root]  # make updateable non-locally
        PREV, NEXT, KEY, RESULT = 0, 1, 2, 3  # names for the link fields

        if maxsize == 0:

            def wrapper(*args, **kwds):
                # no caching, just do a statistics update after a successful call
                result = user_function(*args, **kwds)
                stats[MISSES] += 1
                return result

        elif maxsize is None:

            def wrapper(*args, **kwds):
                # simple caching without ordering or size limit
                key = make_key(args, kwds, typed)
                result = cache_get(
                    key, root
                )  # root used here as a unique not-found sentinel
                if result is not root:
                    stats[HITS] += 1
                    return result
                result = user_function(*args, **kwds)
                cache[key] = result
                stats[MISSES] += 1
                return result

        else:

            def wrapper(*args, **kwds):
                # size limited caching that tracks accesses by recency
                key = make_key(args, kwds, typed) if kwds or typed else args
                with lock:
                    link = cache_get(key)
                    if link is not None:
                        # record recent use of the key by moving it
                        # to the front of the list
                        root, = nonlocal_root
                        link_prev, link_next, key, result = link
                        link_prev[NEXT] = link_next
                        link_next[PREV] = link_prev
                        last = root[PREV]
                        last[NEXT] = root[PREV] = link
                        link[PREV] = last
                        link[NEXT] = root
                        stats[HITS] += 1
                        return result
                result = user_function(*args, **kwds)
                with lock:
                    root, = nonlocal_root
                    if key in cache:
                        # getting here means that this same key was added to the
                        # cache while the lock was released. since the link
                        # update is already done, we need only return the
                        # computed result and update the count of misses.
                        pass
                    elif _len(cache) >= maxsize:
                        # use the old root to store the new key and result
                        oldroot = root
                        oldroot[KEY] = key
                        oldroot[RESULT] = result
                        # empty the oldest link and make it the new root
                        root = nonlocal_root[0] = oldroot[NEXT]
                        oldkey = root[KEY]
                        root[KEY] = root[RESULT] = None
                        # now update the cache dictionary for the new links
                        del cache[oldkey]
                        cache[key] = oldroot
                    else:
                        # put result in a new link at the front of the list
                        last = root[PREV]
                        link = [last, root, key, result]
                        last[NEXT] = root[PREV] = cache[key] = link
                    stats[MISSES] += 1
                return result

        def cache_info():
            """Report cache statistics"""
            with lock:
                return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))

        def cache_clear():
            """Clear the cache and cache statistics"""
            with lock:
                cache.clear()
                root = nonlocal_root[0]
                root[:] = [root, root, None, None]
                stats[:] = [0, 0]

        wrapper.__wrapped__ = user_function
        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        return update_wrapper(wrapper, user_function)

    return decorating_function
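For orientation, a minimal usage sketch of the backported decorator above follows. The import path and the `fib` function are illustrative assumptions, not part of this commit:

```
# Usage sketch (hypothetical): exercising the vendored lru_cache backport.
# The import path below assumes pipenv's vendored module layout.
from pipenv.vendor.backports.functools_lru_cache import lru_cache

@lru_cache(maxsize=32)
def fib(n):
    # Classic exponential recursion, made linear by the cache.
    return n if n < 2 else fib(n - 1) + fib(n - 2)

fib(30)
print(fib.cache_info())   # CacheInfo(hits=..., misses=..., maxsize=32, currsize=...)
fib.cache_clear()         # resets both the cache and the hit/miss statistics
```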
22
Lib/site-packages/pipenv/vendor/backports/shutil_get_terminal_size/LICENSE
vendored
Normal file
22
Lib/site-packages/pipenv/vendor/backports/shutil_get_terminal_size/LICENSE
vendored
Normal file
@@ -0,0 +1,22 @@
The MIT License (MIT)

Copyright (c) 2014 Christopher Rosell

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
11
Lib/site-packages/pipenv/vendor/backports/shutil_get_terminal_size/__init__.py
vendored
Normal file
11
Lib/site-packages/pipenv/vendor/backports/shutil_get_terminal_size/__init__.py
vendored
Normal file
@@ -0,0 +1,11 @@
"""A backport of the get_terminal_size function from Python 3.3's shutil."""

__title__ = "backports.shutil_get_terminal_size"
__version__ = "1.0.0"
__license__ = "MIT"
__author__ = "Christopher Rosell"
__copyright__ = "Copyright 2014 Christopher Rosell"

__all__ = ["get_terminal_size"]

from .get_terminal_size import get_terminal_size
Binary file not shown.
Binary file not shown.
101
Lib/site-packages/pipenv/vendor/backports/shutil_get_terminal_size/get_terminal_size.py
vendored
Normal file
101
Lib/site-packages/pipenv/vendor/backports/shutil_get_terminal_size/get_terminal_size.py
vendored
Normal file
@@ -0,0 +1,101 @@
"""This is a backport of shutil.get_terminal_size from Python 3.3.

The original implementation is in C, but here we use the ctypes and
fcntl modules to create a pure Python version of os.get_terminal_size.
"""

import os
import struct
import sys

from collections import namedtuple

__all__ = ["get_terminal_size"]


terminal_size = namedtuple("terminal_size", "columns lines")

try:
    from ctypes import windll, create_string_buffer

    _handles = {
        0: windll.kernel32.GetStdHandle(-10),
        1: windll.kernel32.GetStdHandle(-11),
        2: windll.kernel32.GetStdHandle(-12),
    }

    def _get_terminal_size(fd):
        columns = lines = 0

        try:
            handle = _handles[fd]
            csbi = create_string_buffer(22)
            res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi)
            if res:
                res = struct.unpack("hhhhHhhhhhh", csbi.raw)
                left, top, right, bottom = res[5:9]
                columns = right - left + 1
                lines = bottom - top + 1
        except Exception:
            pass

        return terminal_size(columns, lines)

except ImportError:
    import fcntl
    import termios

    def _get_terminal_size(fd):
        try:
            res = fcntl.ioctl(fd, termios.TIOCGWINSZ, b"\x00" * 4)
            lines, columns = struct.unpack("hh", res)
        except Exception:
            columns = lines = 0

        return terminal_size(columns, lines)


def get_terminal_size(fallback=(80, 24)):
    """Get the size of the terminal window.

    For each of the two dimensions, the environment variable, COLUMNS
    and LINES respectively, is checked. If the variable is defined and
    the value is a positive integer, it is used.

    When COLUMNS or LINES is not defined, which is the common case,
    the terminal connected to sys.__stdout__ is queried
    by invoking os.get_terminal_size.

    If the terminal size cannot be successfully queried, either because
    the system doesn't support querying, or because we are not
    connected to a terminal, the value given in fallback parameter
    is used. Fallback defaults to (80, 24) which is the default
    size used by many terminal emulators.

    The value returned is a named tuple of type os.terminal_size.
    """
    # Try the environment first
    try:
        columns = int(os.environ["COLUMNS"])
    except (KeyError, ValueError):
        columns = 0

    try:
        lines = int(os.environ["LINES"])
    except (KeyError, ValueError):
        lines = 0

    # Only query if necessary
    if columns <= 0 or lines <= 0:
        try:
            size = _get_terminal_size(sys.__stdout__.fileno())
        except (NameError, OSError):
            size = terminal_size(*fallback)

        if columns <= 0:
            columns = size.columns
        if lines <= 0:
            lines = size.lines

    return terminal_size(columns, lines)
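A quick sketch of calling the backport above; the import path is an assumption based on pipenv's vendor layout:

```
# Usage sketch (hypothetical import path).
from pipenv.vendor.backports.shutil_get_terminal_size import get_terminal_size

size = get_terminal_size(fallback=(80, 24))
# COLUMNS/LINES env vars win; otherwise the tty is queried; otherwise (80, 24).
print(size.columns, size.lines)
```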
255
Lib/site-packages/pipenv/vendor/backports/weakref.LICENSE
vendored
Normal file
255
Lib/site-packages/pipenv/vendor/backports/weakref.LICENSE
vendored
Normal file
@@ -0,0 +1,255 @@
A. HISTORY OF THE SOFTWARE
==========================

Python was created in the early 1990s by Guido van Rossum at Stichting
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
as a successor of a language called ABC. Guido remains Python's
principal author, although it includes many contributions from others.

In 1995, Guido continued his work on Python at the Corporation for
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
in Reston, Virginia where he released several versions of the
software.

In May 2000, Guido and the Python core development team moved to
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
year, the PythonLabs team moved to Digital Creations (now Zope
Corporation, see http://www.zope.com). In 2001, the Python Software
Foundation (PSF, see http://www.python.org/psf/) was formed, a
non-profit organization created specifically to own Python-related
Intellectual Property. Zope Corporation is a sponsoring member of
the PSF.

All Python releases are Open Source (see http://www.opensource.org for
the Open Source Definition). Historically, most, but not all, Python
releases have also been GPL-compatible; the table below summarizes
the various releases.

    Release         Derived     Year        Owner       GPL-
                    from                                compatible? (1)

    0.9.0 thru 1.2              1991-1995   CWI         yes
    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
    1.6             1.5.2       2000        CNRI        no
    2.0             1.6         2000        BeOpen.com  no
    1.6.1           1.6         2001        CNRI        yes (2)
    2.1             2.0+1.6.1   2001        PSF         no
    2.0.1           2.0+1.6.1   2001        PSF         yes
    2.1.1           2.1+2.0.1   2001        PSF         yes
    2.1.2           2.1.1       2002        PSF         yes
    2.1.3           2.1.2       2002        PSF         yes
    2.2 and above   2.1.1       2001-now    PSF         yes

Footnotes:

(1) GPL-compatible doesn't mean that we're distributing Python under
    the GPL. All Python licenses, unlike the GPL, let you distribute
    a modified version without making your changes open source. The
    GPL-compatible licenses make it possible to combine Python with
    other software that is released under the GPL; the others don't.

(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
    because its license has a choice of law clause. According to
    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
    is "not incompatible" with the GPL.

Thanks to the many outside volunteers who have worked under Guido's
direction to make these releases possible.


B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
===============================================================

PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------

1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.

2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; All Rights
Reserved" are retained in Python alone or in any derivative version prepared by
Licensee.

3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.

4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.

8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.


BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------

BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1

1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").

2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.

3. BeOpen is making the Software available to Licensee on an "AS IS"
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

5. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

6. This License Agreement shall be governed by and interpreted in all
respects by the law of the State of California, excluding conflict of
law provisions. Nothing in this License Agreement shall be deemed to
create any relationship of agency, partnership, or joint venture
between BeOpen and Licensee. This License Agreement does not grant
permission to use BeOpen trademarks or trade names in a trademark
sense to endorse or promote products or services of Licensee, or any
third party. As an exception, the "BeOpen Python" logos available at
http://www.pythonlabs.com/logos.html may be used according to the
permissions granted on that web page.

7. By copying, installing or otherwise using the software, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.


CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
---------------------------------------

1. This LICENSE AGREEMENT is between the Corporation for National
Research Initiatives, having an office at 1895 Preston White Drive,
Reston, VA 20191 ("CNRI"), and the Individual or Organization
("Licensee") accessing and otherwise using Python 1.6.1 software in
source or binary form and its associated documentation.

2. Subject to the terms and conditions of this License Agreement, CNRI
hereby grants Licensee a nonexclusive, royalty-free, world-wide
license to reproduce, analyze, test, perform and/or display publicly,
prepare derivative works, distribute, and otherwise use Python 1.6.1
alone or in any derivative version, provided, however, that CNRI's
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
1995-2001 Corporation for National Research Initiatives; All Rights
Reserved" are retained in Python 1.6.1 alone or in any derivative
version prepared by Licensee. Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement. This Agreement together with
Python 1.6.1 may be located on the Internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013. This
Agreement may also be obtained from a proxy server on the Internet
using the following URL: http://hdl.handle.net/1895.22/1013".

3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.

4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee. This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.

8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.

        ACCEPT


CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------

Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
The Netherlands. All rights reserved.

Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.

STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
151
Lib/site-packages/pipenv/vendor/backports/weakref.py
vendored
Normal file
151
Lib/site-packages/pipenv/vendor/backports/weakref.py
vendored
Normal file
@@ -0,0 +1,151 @@
"""
Partial backport of Python 3.6's weakref module:

    finalize (new in Python 3.4)

Backport modifications are marked with "XXX backport".
"""
from __future__ import absolute_import

import itertools
import sys
from weakref import ref

__all__ = ['finalize']


class finalize(object):
    """Class for finalization of weakrefable objects

    finalize(obj, func, *args, **kwargs) returns a callable finalizer
    object which will be called when obj is garbage collected. The
    first time the finalizer is called it evaluates func(*arg, **kwargs)
    and returns the result. After this the finalizer is dead, and
    calling it just returns None.

    When the program exits any remaining finalizers for which the
    atexit attribute is true will be run in reverse order of creation.
    By default atexit is true.
    """

    # Finalizer objects don't have any state of their own. They are
    # just used as keys to lookup _Info objects in the registry. This
    # ensures that they cannot be part of a ref-cycle.

    __slots__ = ()
    _registry = {}
    _shutdown = False
    _index_iter = itertools.count()
    _dirty = False
    _registered_with_atexit = False

    class _Info(object):
        __slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index")

    def __init__(self, obj, func, *args, **kwargs):
        if not self._registered_with_atexit:
            # We may register the exit function more than once because
            # of a thread race, but that is harmless
            import atexit
            atexit.register(self._exitfunc)
            finalize._registered_with_atexit = True
        info = self._Info()
        info.weakref = ref(obj, self)
        info.func = func
        info.args = args
        info.kwargs = kwargs or None
        info.atexit = True
        info.index = next(self._index_iter)
        self._registry[self] = info
        finalize._dirty = True

    def __call__(self, _=None):
        """If alive then mark as dead and return func(*args, **kwargs);
        otherwise return None"""
        info = self._registry.pop(self, None)
        if info and not self._shutdown:
            return info.func(*info.args, **(info.kwargs or {}))

    def detach(self):
        """If alive then mark as dead and return (obj, func, args, kwargs);
        otherwise return None"""
        info = self._registry.get(self)
        obj = info and info.weakref()
        if obj is not None and self._registry.pop(self, None):
            return (obj, info.func, info.args, info.kwargs or {})

    def peek(self):
        """If alive then return (obj, func, args, kwargs);
        otherwise return None"""
        info = self._registry.get(self)
        obj = info and info.weakref()
        if obj is not None:
            return (obj, info.func, info.args, info.kwargs or {})

    @property
    def alive(self):
        """Whether finalizer is alive"""
        return self in self._registry

    @property
    def atexit(self):
        """Whether finalizer should be called at exit"""
        info = self._registry.get(self)
        return bool(info) and info.atexit

    @atexit.setter
    def atexit(self, value):
        info = self._registry.get(self)
        if info:
            info.atexit = bool(value)

    def __repr__(self):
        info = self._registry.get(self)
        obj = info and info.weakref()
        if obj is None:
            return '<%s object at %#x; dead>' % (type(self).__name__, id(self))
        else:
            return '<%s object at %#x; for %r at %#x>' % \
                (type(self).__name__, id(self), type(obj).__name__, id(obj))

    @classmethod
    def _select_for_exit(cls):
        # Return live finalizers marked for exit, oldest first
        L = [(f, i) for (f, i) in cls._registry.items() if i.atexit]
        L.sort(key=lambda item: item[1].index)
        return [f for (f, i) in L]

    @classmethod
    def _exitfunc(cls):
        # At shutdown invoke finalizers for which atexit is true.
        # This is called once all other non-daemonic threads have been
        # joined.
        reenable_gc = False
        try:
            if cls._registry:
                import gc
                if gc.isenabled():
                    reenable_gc = True
                    gc.disable()
                pending = None
                while True:
                    if pending is None or finalize._dirty:
                        pending = cls._select_for_exit()
                        finalize._dirty = False
                    if not pending:
                        break
                    f = pending.pop()
                    try:
                        # gc is disabled, so (assuming no daemonic
                        # threads) the following is the only line in
                        # this function which might trigger creation
                        # of a new finalizer
                        f()
                    except Exception:
                        sys.excepthook(*sys.exc_info())
                    assert f not in cls._registry
        finally:
            # prevent any more finalizers from executing during shutdown
            finalize._shutdown = True
            if reenable_gc:
                gc.enable()
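A minimal sketch of the finalize API above, run on Python 3 for brevity; the `Resource` class, the callback, and the import path are illustrative assumptions:

```
# Usage sketch (hypothetical): registering a cleanup callback with finalize.
from pipenv.vendor.backports.weakref import finalize

class Resource(object):
    pass

res = Resource()
fin = finalize(res, print, "resource released")  # fires when res is collected
print(fin.alive)   # True: res is still strongly referenced
del res            # on CPython, refcounting collects res here and the
                   # finalizer runs, printing "resource released"
print(fin.alive)   # False: a finalizer only ever fires once
```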
12
Lib/site-packages/pipenv/vendor/cached-property.LICENSE
vendored
Normal file
12
Lib/site-packages/pipenv/vendor/cached-property.LICENSE
vendored
Normal file
@@ -0,0 +1,12 @@
Copyright (c) 2015, Daniel Greenfeld
All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

* Neither the name of cached-property nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
152
Lib/site-packages/pipenv/vendor/cached_property.py
vendored
Normal file
152
Lib/site-packages/pipenv/vendor/cached_property.py
vendored
Normal file
@@ -0,0 +1,152 @@
# -*- coding: utf-8 -*-

__author__ = "Daniel Greenfeld"
__email__ = "pydanny@gmail.com"
__version__ = "1.5.1"
__license__ = "BSD"

from time import time
import threading

try:
    import asyncio
except (ImportError, SyntaxError):
    asyncio = None


class cached_property(object):
    """
    A property that is only computed once per instance and then replaces itself
    with an ordinary attribute. Deleting the attribute resets the property.
    Source: https://github.com/bottlepy/bottle/commit/fa7733e075da0d790d809aa3d2f53071897e6f76
    """  # noqa

    def __init__(self, func):
        self.__doc__ = getattr(func, "__doc__")
        self.func = func

    def __get__(self, obj, cls):
        if obj is None:
            return self

        if asyncio and asyncio.iscoroutinefunction(self.func):
            return self._wrap_in_coroutine(obj)

        value = obj.__dict__[self.func.__name__] = self.func(obj)
        return value

    def _wrap_in_coroutine(self, obj):

        @asyncio.coroutine
        def wrapper():
            future = asyncio.ensure_future(self.func(obj))
            obj.__dict__[self.func.__name__] = future
            return future

        return wrapper()


class threaded_cached_property(object):
    """
    A cached_property version for use in environments where multiple threads
    might concurrently try to access the property.
    """

    def __init__(self, func):
        self.__doc__ = getattr(func, "__doc__")
        self.func = func
        self.lock = threading.RLock()

    def __get__(self, obj, cls):
        if obj is None:
            return self

        obj_dict = obj.__dict__
        name = self.func.__name__
        with self.lock:
            try:
                # check if the value was computed before the lock was acquired
                return obj_dict[name]

            except KeyError:
                # if not, do the calculation and release the lock
                return obj_dict.setdefault(name, self.func(obj))


class cached_property_with_ttl(object):
    """
    A property that is only computed once per instance and then replaces itself
    with an ordinary attribute. Setting the ttl to a number expresses how long
    the property will last before being timed out.
    """

    def __init__(self, ttl=None):
        if callable(ttl):
            func = ttl
            ttl = None
        else:
            func = None
        self.ttl = ttl
        self._prepare_func(func)

    def __call__(self, func):
        self._prepare_func(func)
        return self

    def __get__(self, obj, cls):
        if obj is None:
            return self

        now = time()
        obj_dict = obj.__dict__
        name = self.__name__
        try:
            value, last_updated = obj_dict[name]
        except KeyError:
            pass
        else:
            ttl_expired = self.ttl and self.ttl < now - last_updated
            if not ttl_expired:
                return value

        value = self.func(obj)
        obj_dict[name] = (value, now)
        return value

    def __delete__(self, obj):
        obj.__dict__.pop(self.__name__, None)

    def __set__(self, obj, value):
        obj.__dict__[self.__name__] = (value, time())

    def _prepare_func(self, func):
        self.func = func
        if func:
            self.__doc__ = func.__doc__
            self.__name__ = func.__name__
            self.__module__ = func.__module__


# Aliases to make cached_property_with_ttl easier to use
cached_property_ttl = cached_property_with_ttl
timed_cached_property = cached_property_with_ttl


class threaded_cached_property_with_ttl(cached_property_with_ttl):
    """
    A cached_property version for use in environments where multiple threads
    might concurrently try to access the property.
    """

    def __init__(self, ttl=None):
        super(threaded_cached_property_with_ttl, self).__init__(ttl)
        self.lock = threading.RLock()

    def __get__(self, obj, cls):
        with self.lock:
            return super(threaded_cached_property_with_ttl, self).__get__(obj, cls)


# Alias to make threaded_cached_property_with_ttl easier to use
threaded_cached_property_ttl = threaded_cached_property_with_ttl
timed_threaded_cached_property = threaded_cached_property_with_ttl
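A short sketch of the plain and TTL-based decorators above; the `Dataset` class and import path are illustrative assumptions:

```
# Usage sketch (hypothetical): the vendored cached_property decorators.
from pipenv.vendor.cached_property import cached_property, cached_property_with_ttl

class Dataset(object):
    @cached_property
    def rows(self):
        print("loading rows...")   # runs once; the result replaces the attribute
        return [1, 2, 3]

    @cached_property_with_ttl(ttl=5)   # recomputed once 5 seconds have passed
    def total(self):
        return sum(self.rows)

d = Dataset()
d.rows        # prints "loading rows..." and caches on the instance
d.rows        # served straight from d.__dict__, no recomputation
del d.rows    # deleting the attribute resets the plain cached_property
```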
15
Lib/site-packages/pipenv/vendor/cerberus/LICENSE
vendored
Normal file
15
Lib/site-packages/pipenv/vendor/cerberus/LICENSE
vendored
Normal file
@@ -0,0 +1,15 @@
ISC License

Copyright (c) 2012-2016 Nicola Iarocci.

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
32
Lib/site-packages/pipenv/vendor/cerberus/__init__.py
vendored
Normal file
32
Lib/site-packages/pipenv/vendor/cerberus/__init__.py
vendored
Normal file
@@ -0,0 +1,32 @@
"""
Extensible validation for Python dictionaries.

:copyright: 2012-2016 by Nicola Iarocci.
:license: ISC, see LICENSE for more details.

Full documentation is available at http://python-cerberus.org/

"""

from __future__ import absolute_import

from pkg_resources import get_distribution, DistributionNotFound

from cerberus.validator import DocumentError, Validator
from cerberus.schema import rules_set_registry, schema_registry, SchemaError
from cerberus.utils import TypeDefinition


try:
    __version__ = get_distribution("Cerberus").version
except DistributionNotFound:
    __version__ = "unknown"

__all__ = [
    DocumentError.__name__,
    SchemaError.__name__,
    TypeDefinition.__name__,
    Validator.__name__,
    "schema_registry",
    "rules_set_registry",
]
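A quick sketch of the public API exported above; the schema and documents are illustrative, and the import path assumes pipenv's vendor layout:

```
# Usage sketch (hypothetical): validating a document with the vendored Cerberus.
from pipenv.vendor.cerberus import Validator

schema = {
    "name": {"type": "string", "required": True},
    "age": {"type": "integer", "min": 0},
}
v = Validator(schema)
print(v.validate({"name": "Ada", "age": 36}))   # True
print(v.validate({"age": -1}))                  # False
print(v.errors)   # e.g. {'age': ['min value is 0'], 'name': ['required field']}
```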
BIN
Lib/site-packages/pipenv/vendor/cerberus/__pycache__/__init__.cpython-38.pyc
vendored
Normal file
BIN
Lib/site-packages/pipenv/vendor/cerberus/__pycache__/__init__.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/__pycache__/errors.cpython-38.pyc
vendored
Normal file
BIN
Lib/site-packages/pipenv/vendor/cerberus/__pycache__/errors.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/__pycache__/platform.cpython-38.pyc
vendored
Normal file
BIN
Lib/site-packages/pipenv/vendor/cerberus/__pycache__/platform.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/__pycache__/schema.cpython-38.pyc
vendored
Normal file
BIN
Lib/site-packages/pipenv/vendor/cerberus/__pycache__/schema.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/__pycache__/utils.cpython-38.pyc
vendored
Normal file
BIN
Lib/site-packages/pipenv/vendor/cerberus/__pycache__/utils.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/__pycache__/validator.cpython-38.pyc
vendored
Normal file
BIN
Lib/site-packages/pipenv/vendor/cerberus/__pycache__/validator.cpython-38.pyc
vendored
Normal file
Binary file not shown.
634
Lib/site-packages/pipenv/vendor/cerberus/errors.py
vendored
Normal file
634
Lib/site-packages/pipenv/vendor/cerberus/errors.py
vendored
Normal file
@@ -0,0 +1,634 @@
|
||||
# -*-: coding utf-8 -*-
|
||||
""" This module contains the error-related constants and classes. """
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from collections import defaultdict, namedtuple
|
||||
from copy import copy, deepcopy
|
||||
from functools import wraps
|
||||
from pprint import pformat
|
||||
|
||||
from cerberus.platform import PYTHON_VERSION, MutableMapping
|
||||
from cerberus.utils import compare_paths_lt, quote_string
|
||||
|
||||
|
||||
ErrorDefinition = namedtuple('ErrorDefinition', 'code, rule')
|
||||
"""
|
||||
This class is used to define possible errors. Each distinguishable error is
|
||||
defined by a *unique* error ``code`` as integer and the ``rule`` that can
|
||||
cause it as string.
|
||||
The instances' names do not contain a common prefix as they are supposed to be
|
||||
referenced within the module namespace, e.g. ``errors.CUSTOM``.
|
||||
"""
|
||||
|
||||
|
||||
# custom
|
||||
CUSTOM = ErrorDefinition(0x00, None)
|
||||
|
||||
# existence
|
||||
DOCUMENT_MISSING = ErrorDefinition(0x01, None) # issues/141
|
||||
DOCUMENT_MISSING = "document is missing"
|
||||
REQUIRED_FIELD = ErrorDefinition(0x02, 'required')
|
||||
UNKNOWN_FIELD = ErrorDefinition(0x03, None)
|
||||
DEPENDENCIES_FIELD = ErrorDefinition(0x04, 'dependencies')
|
||||
DEPENDENCIES_FIELD_VALUE = ErrorDefinition(0x05, 'dependencies')
|
||||
EXCLUDES_FIELD = ErrorDefinition(0x06, 'excludes')
|
||||
|
||||
# shape
|
||||
DOCUMENT_FORMAT = ErrorDefinition(0x21, None) # issues/141
|
||||
DOCUMENT_FORMAT = "'{0}' is not a document, must be a dict"
|
||||
EMPTY_NOT_ALLOWED = ErrorDefinition(0x22, 'empty')
|
||||
NOT_NULLABLE = ErrorDefinition(0x23, 'nullable')
|
||||
BAD_TYPE = ErrorDefinition(0x24, 'type')
|
||||
BAD_TYPE_FOR_SCHEMA = ErrorDefinition(0x25, 'schema')
|
||||
ITEMS_LENGTH = ErrorDefinition(0x26, 'items')
|
||||
MIN_LENGTH = ErrorDefinition(0x27, 'minlength')
|
||||
MAX_LENGTH = ErrorDefinition(0x28, 'maxlength')
|
||||
|
||||
|
||||
# color
|
||||
REGEX_MISMATCH = ErrorDefinition(0x41, 'regex')
|
||||
MIN_VALUE = ErrorDefinition(0x42, 'min')
|
||||
MAX_VALUE = ErrorDefinition(0x43, 'max')
|
||||
UNALLOWED_VALUE = ErrorDefinition(0x44, 'allowed')
|
||||
UNALLOWED_VALUES = ErrorDefinition(0x45, 'allowed')
|
||||
FORBIDDEN_VALUE = ErrorDefinition(0x46, 'forbidden')
|
||||
FORBIDDEN_VALUES = ErrorDefinition(0x47, 'forbidden')
|
||||
MISSING_MEMBERS = ErrorDefinition(0x48, 'contains')
|
||||
|
||||
# other
|
||||
NORMALIZATION = ErrorDefinition(0x60, None)
|
||||
COERCION_FAILED = ErrorDefinition(0x61, 'coerce')
|
||||
RENAMING_FAILED = ErrorDefinition(0x62, 'rename_handler')
|
||||
READONLY_FIELD = ErrorDefinition(0x63, 'readonly')
|
||||
SETTING_DEFAULT_FAILED = ErrorDefinition(0x64, 'default_setter')
|
||||
|
||||
# groups
|
||||
ERROR_GROUP = ErrorDefinition(0x80, None)
|
||||
MAPPING_SCHEMA = ErrorDefinition(0x81, 'schema')
|
||||
SEQUENCE_SCHEMA = ErrorDefinition(0x82, 'schema')
|
||||
# TODO remove KEYSCHEMA AND VALUESCHEMA with next major release
|
||||
KEYSRULES = KEYSCHEMA = ErrorDefinition(0x83, 'keysrules')
|
||||
VALUESRULES = VALUESCHEMA = ErrorDefinition(0x84, 'valuesrules')
|
||||
BAD_ITEMS = ErrorDefinition(0x8F, 'items')
|
||||
|
||||
LOGICAL = ErrorDefinition(0x90, None)
|
||||
NONEOF = ErrorDefinition(0x91, 'noneof')
|
||||
ONEOF = ErrorDefinition(0x92, 'oneof')
|
||||
ANYOF = ErrorDefinition(0x93, 'anyof')
|
||||
ALLOF = ErrorDefinition(0x94, 'allof')
|
||||
|
||||
|
||||
""" SchemaError messages """
|
||||
|
||||
SCHEMA_ERROR_DEFINITION_TYPE = "schema definition for field '{0}' must be a dict"
|
||||
SCHEMA_ERROR_MISSING = "validation schema missing"
|
||||
|
||||
|
||||
""" Error representations """
|
||||
|
||||
|
||||
class ValidationError(object):
|
||||
""" A simple class to store and query basic error information. """
|
||||
|
||||
def __init__(self, document_path, schema_path, code, rule, constraint, value, info):
|
||||
self.document_path = document_path
|
||||
""" The path to the field within the document that caused the error.
|
||||
Type: :class:`tuple` """
|
||||
self.schema_path = schema_path
|
||||
""" The path to the rule within the schema that caused the error.
|
||||
Type: :class:`tuple` """
|
||||
self.code = code
|
||||
""" The error's identifier code. Type: :class:`int` """
|
||||
self.rule = rule
|
||||
""" The rule that failed. Type: `string` """
|
||||
self.constraint = constraint
|
||||
""" The constraint that failed. """
|
||||
self.value = value
|
||||
""" The value that failed. """
|
||||
self.info = info
|
||||
""" May hold additional information about the error.
|
||||
Type: :class:`tuple` """
|
||||
|
||||
def __eq__(self, other):
|
||||
""" Assumes the errors relate to the same document and schema. """
|
||||
return hash(self) == hash(other)
|
||||
|
||||
def __hash__(self):
|
||||
""" Expects that all other properties are transitively determined. """
|
||||
return hash(self.document_path) ^ hash(self.schema_path) ^ hash(self.code)
|
||||
|
||||
def __lt__(self, other):
|
||||
if self.document_path != other.document_path:
|
||||
return compare_paths_lt(self.document_path, other.document_path)
|
||||
else:
|
||||
return compare_paths_lt(self.schema_path, other.schema_path)
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"{class_name} @ {memptr} ( "
|
||||
"document_path={document_path},"
|
||||
"schema_path={schema_path},"
|
||||
"code={code},"
|
||||
"constraint={constraint},"
|
||||
"value={value},"
|
||||
"info={info} )".format(
|
||||
class_name=self.__class__.__name__,
|
||||
memptr=hex(id(self)), # noqa: E501
|
||||
document_path=self.document_path,
|
||||
schema_path=self.schema_path,
|
||||
code=hex(self.code),
|
||||
constraint=quote_string(self.constraint),
|
||||
value=quote_string(self.value),
|
||||
info=self.info,
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def child_errors(self):
|
||||
"""
|
||||
A list that contains the individual errors of a bulk validation error.
|
||||
"""
|
||||
return self.info[0] if self.is_group_error else None
|
||||
|
||||
@property
|
||||
def definitions_errors(self):
|
||||
""" Dictionary with errors of an *of-rule mapped to the index of the
|
||||
definition it occurred in. Returns :obj:`None` if not applicable.
|
||||
"""
|
||||
if not self.is_logic_error:
|
||||
return None
|
||||
|
||||
result = defaultdict(list)
|
||||
for error in self.child_errors:
|
||||
i = error.schema_path[len(self.schema_path)]
|
||||
result[i].append(error)
|
||||
return result
|
||||
|
||||
@property
|
||||
def field(self):
|
||||
""" Field of the contextual mapping, possibly :obj:`None`. """
|
||||
if self.document_path:
|
||||
return self.document_path[-1]
|
||||
else:
|
||||
return None
|
||||
|
||||
@property
|
||||
def is_group_error(self):
|
||||
""" ``True`` for errors of bulk validations. """
|
||||
return bool(self.code & ERROR_GROUP.code)
|
||||
|
||||
@property
|
||||
def is_logic_error(self):
|
||||
""" ``True`` for validation errors against different schemas with
|
||||
*of-rules. """
|
||||
return bool(self.code & LOGICAL.code - ERROR_GROUP.code)
|
||||
|
||||
@property
|
||||
def is_normalization_error(self):
|
||||
""" ``True`` for normalization errors. """
|
||||
return bool(self.code & NORMALIZATION.code)
|
||||
|
||||
|
||||
class ErrorList(list):
|
||||
""" A list for :class:`~cerberus.errors.ValidationError` instances that
|
||||
can be queried with the ``in`` keyword for a particular
|
||||
:class:`~cerberus.errors.ErrorDefinition`. """
|
||||
|
||||
def __contains__(self, error_definition):
|
||||
if not isinstance(error_definition, ErrorDefinition):
|
||||
raise TypeError
|
||||
|
||||
wanted_code = error_definition.code
|
||||
return any(x.code == wanted_code for x in self)
|
||||
|
||||
|
||||
class ErrorTreeNode(MutableMapping):
|
||||
__slots__ = ('descendants', 'errors', 'parent_node', 'path', 'tree_root')
|
||||
|
||||
def __init__(self, path, parent_node):
|
||||
self.parent_node = parent_node
|
||||
self.tree_root = self.parent_node.tree_root
|
||||
self.path = path[: self.parent_node.depth + 1]
|
||||
self.errors = ErrorList()
|
||||
self.descendants = {}
|
||||
|
||||
def __contains__(self, item):
|
||||
if isinstance(item, ErrorDefinition):
|
||||
return item in self.errors
|
||||
else:
|
||||
return item in self.descendants
|
||||
|
||||
def __delitem__(self, key):
|
||||
del self.descendants[key]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.errors)
|
||||
|
||||
def __getitem__(self, item):
|
||||
if isinstance(item, ErrorDefinition):
|
||||
for error in self.errors:
|
||||
if item.code == error.code:
|
||||
return error
|
||||
return None
|
||||
else:
|
||||
return self.descendants.get(item)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.errors)
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
self.descendants[key] = value
|
||||
|
||||
def __str__(self):
|
||||
return str(self.errors) + ',' + str(self.descendants)
|
||||
|
||||
@property
|
||||
def depth(self):
|
||||
return len(self.path)
|
||||
|
||||
@property
|
||||
def tree_type(self):
|
||||
return self.tree_root.tree_type
|
||||
|
||||
def add(self, error):
|
||||
error_path = self._path_of_(error)
|
||||
|
||||
key = error_path[self.depth]
|
||||
if key not in self.descendants:
|
||||
self[key] = ErrorTreeNode(error_path, self)
|
||||
|
||||
node = self[key]
|
||||
|
||||
if len(error_path) == self.depth + 1:
|
||||
node.errors.append(error)
|
||||
node.errors.sort()
|
||||
if error.is_group_error:
|
||||
for child_error in error.child_errors:
|
||||
self.tree_root.add(child_error)
|
||||
else:
|
||||
node.add(error)
|
||||
|
||||
def _path_of_(self, error):
|
||||
return getattr(error, self.tree_type + '_path')
|
||||
|
||||
|
||||
class ErrorTree(ErrorTreeNode):
|
||||
""" Base class for :class:`~cerberus.errors.DocumentErrorTree` and
|
||||
:class:`~cerberus.errors.SchemaErrorTree`. """
|
||||
|
||||
def __init__(self, errors=()):
|
||||
self.parent_node = None
|
||||
self.tree_root = self
|
||||
self.path = ()
|
||||
self.errors = ErrorList()
|
||||
self.descendants = {}
|
||||
for error in errors:
|
||||
self.add(error)
|
||||
|
||||
def add(self, error):
|
||||
""" Add an error to the tree.
|
||||
|
||||
:param error: :class:`~cerberus.errors.ValidationError`
|
||||
"""
|
||||
if not self._path_of_(error):
|
||||
self.errors.append(error)
|
||||
self.errors.sort()
|
||||
else:
|
||||
super(ErrorTree, self).add(error)
|
||||
|
||||
def fetch_errors_from(self, path):
|
||||
""" Returns all errors for a particular path.
|
||||
|
||||
:param path: :class:`tuple` of :term:`hashable` s.
|
||||
:rtype: :class:`~cerberus.errors.ErrorList`
|
||||
"""
|
||||
node = self.fetch_node_from(path)
|
||||
if node is not None:
|
||||
return node.errors
|
||||
else:
|
||||
return ErrorList()
|
||||
|
||||
def fetch_node_from(self, path):
|
||||
""" Returns a node for a path.
|
||||
|
||||
:param path: Tuple of :term:`hashable` s.
|
||||
:rtype: :class:`~cerberus.errors.ErrorTreeNode` or :obj:`None`
|
||||
"""
|
||||
context = self
|
||||
for key in path:
|
||||
context = context[key]
|
||||
if context is None:
|
||||
break
|
||||
return context
|
||||
|
||||
|
||||
class DocumentErrorTree(ErrorTree):
|
||||
""" Implements a dict-like class to query errors by indexes following the
|
||||
structure of a validated document. """
|
||||
|
||||
tree_type = 'document'
|
||||
|
||||
|
||||
class SchemaErrorTree(ErrorTree):
|
||||
""" Implements a dict-like class to query errors by indexes following the
|
||||
structure of the used schema. """
|
||||
|
||||
tree_type = 'schema'
|
||||
|
||||
|
||||
class BaseErrorHandler(object):
|
||||
""" Base class for all error handlers.
|
||||
Subclasses are identified as error-handlers with an instance-test. """
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
""" Optionally initialize a new instance. """
|
||||
pass
|
||||
|
||||
def __call__(self, errors):
|
||||
""" Returns errors in a handler-specific format.
|
||||
|
||||
:param errors: An object containing the errors.
|
||||
:type errors: :term:`iterable` of
|
||||
:class:`~cerberus.errors.ValidationError` instances or a
|
||||
:class:`~cerberus.Validator` instance
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __iter__(self):
|
||||
""" Be a superhero and implement an iterator over errors. """
|
||||
raise NotImplementedError
|
||||
|
||||
def add(self, error):
|
||||
""" Add an error to the errors' container object of a handler.
|
||||
|
||||
:param error: The error to add.
|
||||
:type error: :class:`~cerberus.errors.ValidationError`
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def emit(self, error):
|
||||
""" Optionally emits an error in the handler's format to a stream.
|
||||
Or light a LED, or even shut down a power plant.
|
||||
|
||||
:param error: The error to emit.
|
||||
:type error: :class:`~cerberus.errors.ValidationError`
|
||||
"""
|
||||
pass
|
||||
|
||||
def end(self, validator):
|
||||
""" Gets called when a validation ends.
|
||||
|
||||
:param validator: The calling validator.
|
||||
:type validator: :class:`~cerberus.Validator` """
|
||||
pass
|
||||
|
||||
def extend(self, errors):
|
||||
""" Adds all errors to the handler's container object.
|
||||
|
||||
:param errors: The errors to add.
|
||||
:type errors: :term:`iterable` of
|
||||
:class:`~cerberus.errors.ValidationError` instances
|
||||
"""
|
||||
for error in errors:
|
||||
self.add(error)
|
||||
|
||||
def start(self, validator):
|
||||
""" Gets called when a validation starts.
|
||||
|
||||
:param validator: The calling validator.
|
||||
:type validator: :class:`~cerberus.Validator`
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class ToyErrorHandler(BaseErrorHandler):
|
||||
def __call__(self, *args, **kwargs):
|
||||
raise RuntimeError('This is not supposed to happen.')
|
||||
|
||||
def clear(self):
|
||||
pass
|
||||
|
||||
|
||||
def encode_unicode(f):
|
||||
"""Cerberus error messages expect regular binary strings.
|
||||
If unicode is used in a ValidationError message can't be printed.
|
||||
|
||||
This decorator ensures that if legacy Python is used unicode
|
||||
strings are encoded before passing to a function.
|
||||
"""
|
||||
|
||||
@wraps(f)
|
||||
def wrapped(obj, error):
|
||||
def _encode(value):
|
||||
"""Helper encoding unicode strings into binary utf-8"""
|
||||
if isinstance(value, unicode): # noqa: F821
|
||||
return value.encode('utf-8')
|
||||
return value
|
||||
|
||||
error = copy(error)
|
||||
error.document_path = _encode(error.document_path)
|
||||
error.schema_path = _encode(error.schema_path)
|
||||
error.constraint = _encode(error.constraint)
|
||||
error.value = _encode(error.value)
|
||||
error.info = _encode(error.info)
|
||||
return f(obj, error)
|
||||
|
||||
return wrapped if PYTHON_VERSION < 3 else f
|
||||
|
||||
|
||||
class BasicErrorHandler(BaseErrorHandler):
|
||||
""" Models cerberus' legacy. Returns a :class:`dict`. When mangled
|
||||
through :class:`str` a pretty-formatted representation of that
|
||||
tree is returned.
|
||||
"""
|
||||
|
||||
messages = {
|
||||
0x00: "{0}",
|
||||
0x01: "document is missing",
|
||||
0x02: "required field",
|
||||
0x03: "unknown field",
|
||||
0x04: "field '{0}' is required",
|
||||
0x05: "depends on these values: {constraint}",
|
||||
0x06: "{0} must not be present with '{field}'",
|
||||
0x21: "'{0}' is not a document, must be a dict",
|
||||
0x22: "empty values not allowed",
|
||||
0x23: "null value not allowed",
|
||||
0x24: "must be of {constraint} type",
|
||||
0x25: "must be of dict type",
|
||||
0x26: "length of list should be {0}, it is {1}",
|
||||
0x27: "min length is {constraint}",
|
||||
0x28: "max length is {constraint}",
|
||||
0x41: "value does not match regex '{constraint}'",
|
||||
0x42: "min value is {constraint}",
|
||||
0x43: "max value is {constraint}",
|
||||
0x44: "unallowed value {value}",
|
||||
0x45: "unallowed values {0}",
|
||||
0x46: "unallowed value {value}",
|
||||
0x47: "unallowed values {0}",
|
||||
0x48: "missing members {0}",
|
||||
0x61: "field '{field}' cannot be coerced: {0}",
|
||||
0x62: "field '{field}' cannot be renamed: {0}",
|
||||
0x63: "field is read-only",
|
||||
0x64: "default value for '{field}' cannot be set: {0}",
|
||||
0x81: "mapping doesn't validate subschema: {0}",
|
||||
0x82: "one or more sequence-items don't validate: {0}",
|
||||
0x83: "one or more keys of a mapping don't validate: {0}",
|
||||
0x84: "one or more values in a mapping don't validate: {0}",
|
||||
0x85: "one or more sequence-items don't validate: {0}",
|
||||
0x91: "one or more definitions validate",
|
||||
0x92: "none or more than one rule validate",
|
||||
0x93: "no definitions validate",
|
||||
0x94: "one or more definitions don't validate",
|
||||
}
|
||||
|
||||
def __init__(self, tree=None):
|
||||
self.tree = {} if tree is None else tree
|
||||
|
||||
def __call__(self, errors):
|
||||
self.clear()
|
||||
self.extend(errors)
|
||||
return self.pretty_tree
|
||||
|
||||
def __str__(self):
|
||||
return pformat(self.pretty_tree)
|
||||
|
||||
@property
|
||||
def pretty_tree(self):
|
||||
pretty = deepcopy(self.tree)
|
||||
for field in pretty:
|
||||
self._purge_empty_dicts(pretty[field])
|
||||
return pretty
|
||||
|
||||
@encode_unicode
|
||||
def add(self, error):
|
||||
# Make sure the original error is not altered with
|
||||
# error paths specific to the handler.
|
||||
error = deepcopy(error)
|
||||
|
||||
self._rewrite_error_path(error)
|
||||
|
||||
if error.is_logic_error:
|
||||
self._insert_logic_error(error)
|
||||
elif error.is_group_error:
|
||||
self._insert_group_error(error)
|
||||
elif error.code in self.messages:
|
||||
self._insert_error(
|
||||
error.document_path, self._format_message(error.field, error)
|
||||
)
|
||||
|
||||
def clear(self):
|
||||
self.tree = {}
|
||||
|
||||
def start(self, validator):
|
||||
self.clear()
|
||||
|
||||
def _format_message(self, field, error):
|
||||
return self.messages[error.code].format(
|
||||
*error.info, constraint=error.constraint, field=field, value=error.value
|
||||
)
|
||||
|
||||
def _insert_error(self, path, node):
|
||||
""" Adds an error or sub-tree to :attr:tree.
|
||||
|
||||
:param path: Path to the error.
|
||||
:type path: Tuple of strings and integers.
|
||||
:param node: An error message or a sub-tree.
|
||||
:type node: String or dictionary.
|
||||
"""
|
||||
field = path[0]
|
||||
if len(path) == 1:
|
||||
if field in self.tree:
|
||||
subtree = self.tree[field].pop()
|
||||
self.tree[field] += [node, subtree]
|
||||
else:
|
||||
self.tree[field] = [node, {}]
|
||||
elif len(path) >= 1:
|
||||
if field not in self.tree:
|
||||
self.tree[field] = [{}]
|
||||
subtree = self.tree[field][-1]
|
||||
|
||||
if subtree:
|
||||
new = self.__class__(tree=copy(subtree))
|
||||
else:
|
||||
new = self.__class__()
|
||||
new._insert_error(path[1:], node)
|
||||
subtree.update(new.tree)
|
||||
|
||||
def _insert_group_error(self, error):
|
||||
for child_error in error.child_errors:
|
||||
if child_error.is_logic_error:
|
||||
self._insert_logic_error(child_error)
|
||||
elif child_error.is_group_error:
|
||||
self._insert_group_error(child_error)
|
||||
else:
|
||||
self._insert_error(
|
||||
child_error.document_path,
|
||||
self._format_message(child_error.field, child_error),
|
||||
)
|
||||
|
||||
def _insert_logic_error(self, error):
|
||||
field = error.field
|
||||
self._insert_error(error.document_path, self._format_message(field, error))
|
||||
|
||||
for definition_errors in error.definitions_errors.values():
|
||||
for child_error in definition_errors:
|
||||
if child_error.is_logic_error:
|
||||
self._insert_logic_error(child_error)
|
||||
elif child_error.is_group_error:
|
||||
self._insert_group_error(child_error)
|
||||
else:
|
||||
self._insert_error(
|
||||
child_error.document_path,
|
||||
self._format_message(field, child_error),
|
||||
)
|
||||
|
||||
def _purge_empty_dicts(self, error_list):
|
||||
subtree = error_list[-1]
|
||||
if not error_list[-1]:
|
||||
error_list.pop()
|
||||
else:
|
||||
for key in subtree:
|
||||
self._purge_empty_dicts(subtree[key])
|
||||
|
||||
def _rewrite_error_path(self, error, offset=0):
|
||||
"""
|
||||
Recursively rewrites the error path to correctly represent logic errors
|
||||
"""
|
||||
if error.is_logic_error:
|
||||
self._rewrite_logic_error_path(error, offset)
|
||||
elif error.is_group_error:
|
||||
self._rewrite_group_error_path(error, offset)
|
||||
|
||||
def _rewrite_group_error_path(self, error, offset=0):
|
||||
child_start = len(error.document_path) - offset
|
||||
|
||||
for child_error in error.child_errors:
|
||||
relative_path = child_error.document_path[child_start:]
|
||||
child_error.document_path = error.document_path + relative_path
|
||||
|
||||
self._rewrite_error_path(child_error, offset)
|
||||
|
||||
def _rewrite_logic_error_path(self, error, offset=0):
|
||||
child_start = len(error.document_path) - offset
|
||||
|
||||
for i, definition_errors in error.definitions_errors.items():
|
||||
if not definition_errors:
|
||||
continue
|
||||
|
||||
nodename = '%s definition %s' % (error.rule, i)
|
||||
path = error.document_path + (nodename,)
|
||||
|
||||
for child_error in definition_errors:
|
||||
rel_path = child_error.document_path[child_start:]
|
||||
child_error.document_path = path + rel_path
|
||||
|
||||
self._rewrite_error_path(child_error, offset + 1)
|
||||
|
||||
|
||||
class SchemaErrorHandler(BasicErrorHandler):
|
||||
messages = BasicErrorHandler.messages.copy()
|
||||
messages[0x03] = "unknown rule"
|
||||
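A minimal sketch (not part of the diff) of what the BasicErrorHandler above produces; it assumes the vendored `cerberus` package is importable. Group errors become a sub-dict keyed by the offending path segment, which is exactly the tree the `_insert_*` methods build up:
```
from cerberus import Validator

# The default Validator uses BasicErrorHandler, so .errors is its pretty_tree.
v = Validator({'scores': {'type': 'list', 'schema': {'type': 'integer'}}})
v.validate({'scores': [1, 'two']})
print(v.errors)  # {'scores': [{1: ['must be of integer type']}]}
```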
40
Lib/site-packages/pipenv/vendor/cerberus/platform.py
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
""" Platform-dependent objects """
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
PYTHON_VERSION = float(sys.version_info[0]) + float(sys.version_info[1]) / 10
|
||||
|
||||
|
||||
if PYTHON_VERSION < 3:
|
||||
_str_type = basestring # noqa: F821
|
||||
_int_types = (int, long) # noqa: F821
|
||||
else:
|
||||
_str_type = str
|
||||
_int_types = (int,)
|
||||
|
||||
|
||||
if PYTHON_VERSION < 3.3:
|
||||
from collections import ( # noqa: F401
|
||||
Callable,
|
||||
Container,
|
||||
Hashable,
|
||||
Iterable,
|
||||
Mapping,
|
||||
MutableMapping,
|
||||
Sequence,
|
||||
Set,
|
||||
Sized,
|
||||
)
|
||||
else:
|
||||
from collections.abc import ( # noqa: F401
|
||||
Callable,
|
||||
Container,
|
||||
Hashable,
|
||||
Iterable,
|
||||
Mapping,
|
||||
MutableMapping,
|
||||
Sequence,
|
||||
Set,
|
||||
Sized,
|
||||
)
|
||||
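An illustrative aside (not part of the diff) on the PYTHON_VERSION scheme above: major + minor/10 yields 3.8 for Python 3.8, but would report Python 3.10 as 4.0, so gates like `PYTHON_VERSION < 3.3` only stay correct for minor versions below 10. The helper name here is hypothetical:
```
import sys

def version_as_float(version_info=sys.version_info):
    # Mirrors the vendored expression: (3, 8) -> 3.8, (2, 7) -> 2.7.
    return float(version_info[0]) + float(version_info[1]) / 10

assert version_as_float((3, 8)) == 3.8
assert version_as_float((3, 10)) == 4.0  # the scheme's blind spot
```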
536
Lib/site-packages/pipenv/vendor/cerberus/schema.py
vendored
Normal file
@@ -0,0 +1,536 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from copy import copy
|
||||
from warnings import warn
|
||||
|
||||
from cerberus import errors
|
||||
from cerberus.platform import (
|
||||
_str_type,
|
||||
Callable,
|
||||
Hashable,
|
||||
Mapping,
|
||||
MutableMapping,
|
||||
Sequence,
|
||||
)
|
||||
from cerberus.utils import (
|
||||
get_Validator_class,
|
||||
validator_factory,
|
||||
mapping_hash,
|
||||
TypeDefinition,
|
||||
)
|
||||
|
||||
|
||||
class _Abort(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class SchemaError(Exception):
|
||||
""" Raised when the validation schema is missing, has the wrong format or
|
||||
contains errors. """
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class DefinitionSchema(MutableMapping):
|
||||
""" A dict-subclass for caching of validated schemas. """
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
if 'SchemaValidator' not in globals():
|
||||
global SchemaValidator
|
||||
SchemaValidator = validator_factory('SchemaValidator', SchemaValidatorMixin)
|
||||
types_mapping = SchemaValidator.types_mapping.copy()
|
||||
types_mapping.update(
|
||||
{
|
||||
'callable': TypeDefinition('callable', (Callable,), ()),
|
||||
'hashable': TypeDefinition('hashable', (Hashable,), ()),
|
||||
}
|
||||
)
|
||||
SchemaValidator.types_mapping = types_mapping
|
||||
|
||||
return super(DefinitionSchema, cls).__new__(cls)
|
||||
|
||||
def __init__(self, validator, schema):
|
||||
"""
|
||||
:param validator: An instance of Validator-(sub-)class that uses this
|
||||
schema.
|
||||
:param schema: A definition-schema as ``dict``. Defaults to an empty
|
||||
one.
|
||||
"""
|
||||
if not isinstance(validator, get_Validator_class()):
|
||||
raise RuntimeError('validator argument must be a Validator-' 'instance.')
|
||||
self.validator = validator
|
||||
|
||||
if isinstance(schema, _str_type):
|
||||
schema = validator.schema_registry.get(schema, schema)
|
||||
|
||||
if not isinstance(schema, Mapping):
|
||||
try:
|
||||
schema = dict(schema)
|
||||
except Exception:
|
||||
raise SchemaError(errors.SCHEMA_ERROR_DEFINITION_TYPE.format(schema))
|
||||
|
||||
self.validation_schema = SchemaValidationSchema(validator)
|
||||
self.schema_validator = SchemaValidator(
|
||||
None,
|
||||
allow_unknown=self.validation_schema,
|
||||
error_handler=errors.SchemaErrorHandler,
|
||||
target_schema=schema,
|
||||
target_validator=validator,
|
||||
)
|
||||
|
||||
schema = self.expand(schema)
|
||||
self.validate(schema)
|
||||
self.schema = schema
|
||||
|
||||
def __delitem__(self, key):
|
||||
_new_schema = self.schema.copy()
|
||||
try:
|
||||
del _new_schema[key]
|
||||
except ValueError:
|
||||
raise SchemaError("Schema has no field '%s' defined" % key)
|
||||
except Exception as e:
|
||||
raise e
|
||||
else:
|
||||
del self.schema[key]
|
||||
|
||||
def __getitem__(self, item):
|
||||
return self.schema[item]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.schema)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.schema)
|
||||
|
||||
def __repr__(self):
|
||||
return str(self)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
value = self.expand({0: value})[0]
|
||||
self.validate({key: value})
|
||||
self.schema[key] = value
|
||||
|
||||
def __str__(self):
|
||||
if hasattr(self, "schema"):
|
||||
return str(self.schema)
|
||||
else:
|
||||
return "No schema data is set yet."
|
||||
|
||||
def copy(self):
|
||||
return self.__class__(self.validator, self.schema.copy())
|
||||
|
||||
@classmethod
|
||||
def expand(cls, schema):
|
||||
try:
|
||||
schema = cls._expand_logical_shortcuts(schema)
|
||||
schema = cls._expand_subschemas(schema)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# TODO remove this with the next major release
|
||||
schema = cls._rename_deprecated_rulenames(schema)
|
||||
|
||||
return schema
|
||||
|
||||
@classmethod
|
||||
def _expand_logical_shortcuts(cls, schema):
|
||||
""" Expand agglutinated rules in a definition-schema.
|
||||
|
||||
:param schema: The schema-definition to expand.
|
||||
:return: The expanded schema-definition.
|
||||
"""
|
||||
|
||||
def is_of_rule(x):
|
||||
return isinstance(x, _str_type) and x.startswith(
|
||||
('allof_', 'anyof_', 'noneof_', 'oneof_')
|
||||
)
|
||||
|
||||
for field, rules in schema.items():
|
||||
for of_rule in [x for x in rules if is_of_rule(x)]:
|
||||
operator, rule = of_rule.split('_', 1)
|
||||
rules.update({operator: []})
|
||||
for value in rules[of_rule]:
|
||||
rules[operator].append({rule: value})
|
||||
del rules[of_rule]
|
||||
return schema
|
||||
|
||||
@classmethod
|
||||
def _expand_subschemas(cls, schema):
|
||||
def has_schema_rule():
|
||||
return isinstance(schema[field], Mapping) and 'schema' in schema[field]
|
||||
|
||||
def has_mapping_schema():
|
||||
""" Tries to determine heuristically if the schema-constraints are
|
||||
aimed to mappings. """
|
||||
try:
|
||||
return all(
|
||||
isinstance(x, Mapping) for x in schema[field]['schema'].values()
|
||||
)
|
||||
except TypeError:
|
||||
return False
|
||||
|
||||
for field in schema:
|
||||
if not has_schema_rule():
|
||||
pass
|
||||
elif has_mapping_schema():
|
||||
schema[field]['schema'] = cls.expand(schema[field]['schema'])
|
||||
else: # assumes schema-constraints for a sequence
|
||||
schema[field]['schema'] = cls.expand({0: schema[field]['schema']})[0]
|
||||
|
||||
# TODO remove the last two values in the tuple with the next major release
|
||||
for rule in ('keysrules', 'valuesrules', 'keyschema', 'valueschema'):
|
||||
if rule in schema[field]:
|
||||
schema[field][rule] = cls.expand({0: schema[field][rule]})[0]
|
||||
|
||||
for rule in ('allof', 'anyof', 'items', 'noneof', 'oneof'):
|
||||
if rule in schema[field]:
|
||||
if not isinstance(schema[field][rule], Sequence):
|
||||
continue
|
||||
new_rules_definition = []
|
||||
for item in schema[field][rule]:
|
||||
new_rules_definition.append(cls.expand({0: item})[0])
|
||||
schema[field][rule] = new_rules_definition
|
||||
return schema
|
||||
|
||||
def get(self, item, default=None):
|
||||
return self.schema.get(item, default)
|
||||
|
||||
def items(self):
|
||||
return self.schema.items()
|
||||
|
||||
def update(self, schema):
|
||||
try:
|
||||
schema = self.expand(schema)
|
||||
_new_schema = self.schema.copy()
|
||||
_new_schema.update(schema)
|
||||
self.validate(_new_schema)
|
||||
except ValueError:
|
||||
raise SchemaError(errors.SCHEMA_ERROR_DEFINITION_TYPE.format(schema))
|
||||
except Exception as e:
|
||||
raise e
|
||||
else:
|
||||
self.schema = _new_schema
|
||||
|
||||
# TODO remove with next major release
|
||||
@staticmethod
|
||||
def _rename_deprecated_rulenames(schema):
|
||||
for field, rules in schema.items():
|
||||
|
||||
if isinstance(rules, str): # registry reference
|
||||
continue
|
||||
|
||||
for old, new in (
|
||||
('keyschema', 'keysrules'),
|
||||
('validator', 'check_with'),
|
||||
('valueschema', 'valuesrules'),
|
||||
):
|
||||
|
||||
if old not in rules:
|
||||
continue
|
||||
|
||||
                if new in rules:
                    raise RuntimeError(
                        "The rule '{new}' is also present with its old "
                        "name '{old}' in the same set of rules.".format(
                            new=new, old=old
                        )
                    )

                warn(
                    "The rule '{old}' was renamed to '{new}'. The old name will "
                    "not be available in the next major release of "
                    "Cerberus.".format(old=old, new=new),
                    DeprecationWarning,
                )
                schema[field][new] = schema[field][old]
                schema[field].pop(old)

        return schema

    def regenerate_validation_schema(self):
        self.validation_schema = SchemaValidationSchema(self.validator)

    def validate(self, schema=None):
        """ Validates a schema that defines rules against supported rules.

        :param schema: The schema to be validated as a legal cerberus schema
                       according to the rules of the assigned Validator object.

        Raises a :class:`~cerberus.base.SchemaError` when an invalid schema
        is encountered. """
        if schema is None:
            schema = self.schema
        _hash = (mapping_hash(schema), mapping_hash(self.validator.types_mapping))
        if _hash not in self.validator._valid_schemas:
            self._validate(schema)
            self.validator._valid_schemas.add(_hash)

    def _validate(self, schema):
        if isinstance(schema, _str_type):
            schema = self.validator.schema_registry.get(schema, schema)

        if schema is None:
            raise SchemaError(errors.SCHEMA_ERROR_MISSING)

        schema = copy(schema)
        for field in schema:
            if isinstance(schema[field], _str_type):
                schema[field] = rules_set_registry.get(schema[field], schema[field])

        if not self.schema_validator(schema, normalize=False):
            raise SchemaError(self.schema_validator.errors)


class UnvalidatedSchema(DefinitionSchema):
    def __init__(self, schema={}):
        if not isinstance(schema, Mapping):
            schema = dict(schema)
        self.schema = schema

    def validate(self, schema):
        pass

    def copy(self):
        # Override ancestor's copy, because
        # UnvalidatedSchema does not have .validator:
        return self.__class__(self.schema.copy())


class SchemaValidationSchema(UnvalidatedSchema):
    def __init__(self, validator):
        self.schema = {
            'allow_unknown': False,
            'schema': validator.rules,
            'type': 'dict',
        }


class SchemaValidatorMixin(object):
    """ This validator mixin provides mechanics to validate schemas passed to a Cerberus
        validator. """

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('known_rules_set_refs', set())
        kwargs.setdefault('known_schema_refs', set())
        super(SchemaValidatorMixin, self).__init__(*args, **kwargs)

    @property
    def known_rules_set_refs(self):
        """ The encountered references to rules set registry items. """
        return self._config['known_rules_set_refs']

    @property
    def known_schema_refs(self):
        """ The encountered references to schema registry items. """
        return self._config['known_schema_refs']

    @property
    def target_schema(self):
        """ The schema that is being validated. """
        return self._config['target_schema']

    @property
    def target_validator(self):
        """ The validator whose schema is being validated. """
        return self._config['target_validator']

    def _check_with_bulk_schema(self, field, value):
        # resolve schema registry reference
        if isinstance(value, _str_type):
            if value in self.known_rules_set_refs:
                return
            else:
                self.known_rules_set_refs.add(value)
            definition = self.target_validator.rules_set_registry.get(value)
            if definition is None:
                self._error(field, 'Rules set definition %s not found.' % value)
                return
            else:
                value = definition

        _hash = (
            mapping_hash({'turing': value}),
            mapping_hash(self.target_validator.types_mapping),
        )
        if _hash in self.target_validator._valid_schemas:
            return

        validator = self._get_child_validator(
            document_crumb=field,
            allow_unknown=False,
            schema=self.target_validator.rules,
        )
        validator(value, normalize=False)
        if validator._errors:
            self._error(validator._errors)
        else:
            self.target_validator._valid_schemas.add(_hash)

    def _check_with_dependencies(self, field, value):
        if isinstance(value, _str_type):
            pass
        elif isinstance(value, Mapping):
            validator = self._get_child_validator(
                document_crumb=field,
                schema={'valuesrules': {'type': 'list'}},
                allow_unknown=True,
            )
            if not validator(value, normalize=False):
                self._error(validator._errors)
        elif isinstance(value, Sequence):
            if not all(isinstance(x, Hashable) for x in value):
                path = self.document_path + (field,)
                self._error(path, 'All dependencies must be a hashable type.')

    def _check_with_items(self, field, value):
        for i, schema in enumerate(value):
            self._check_with_bulk_schema((field, i), schema)

    def _check_with_schema(self, field, value):
        try:
            value = self._handle_schema_reference_for_validator(field, value)
        except _Abort:
            return

        _hash = (mapping_hash(value), mapping_hash(self.target_validator.types_mapping))
        if _hash in self.target_validator._valid_schemas:
            return

        validator = self._get_child_validator(
            document_crumb=field, schema=None, allow_unknown=self.root_allow_unknown
        )
        validator(self._expand_rules_set_refs(value), normalize=False)
        if validator._errors:
            self._error(validator._errors)
        else:
            self.target_validator._valid_schemas.add(_hash)

    def _check_with_type(self, field, value):
        value = set((value,)) if isinstance(value, _str_type) else set(value)
        invalid_constraints = value - set(self.target_validator.types)
        if invalid_constraints:
            self._error(
                field, 'Unsupported types: {}'.format(', '.join(invalid_constraints))
            )

    def _expand_rules_set_refs(self, schema):
        result = {}
        for k, v in schema.items():
            if isinstance(v, _str_type):
                result[k] = self.target_validator.rules_set_registry.get(v)
            else:
                result[k] = v
        return result

    def _handle_schema_reference_for_validator(self, field, value):
        if not isinstance(value, _str_type):
            return value
        if value in self.known_schema_refs:
            raise _Abort

        self.known_schema_refs.add(value)
        definition = self.target_validator.schema_registry.get(value)
        if definition is None:
            path = self.document_path + (field,)
            self._error(path, 'Schema definition {} not found.'.format(value))
            raise _Abort
        return definition

    def _validate_logical(self, rule, field, value):
        """ {'allowed': ('allof', 'anyof', 'noneof', 'oneof')} """
        if not isinstance(value, Sequence):
            self._error(field, errors.BAD_TYPE)
            return

        validator = self._get_child_validator(
            document_crumb=rule,
            allow_unknown=False,
            schema=self.target_validator.validation_rules,
        )

        for constraints in value:
            _hash = (
                mapping_hash({'turing': constraints}),
                mapping_hash(self.target_validator.types_mapping),
            )
            if _hash in self.target_validator._valid_schemas:
                continue

            validator(constraints, normalize=False)
            if validator._errors:
                self._error(validator._errors)
            else:
                self.target_validator._valid_schemas.add(_hash)


####


class Registry(object):
    """ A registry to store and retrieve schemas and parts of it by a name
    that can be used in validation schemas.

    :param definitions: Optional, initial definitions.
    :type definitions: any :term:`mapping` """

    def __init__(self, definitions={}):
        self._storage = {}
        self.extend(definitions)

    def add(self, name, definition):
        """ Register a definition to the registry. Existing definitions are
        replaced silently.

        :param name: The name which can be used as reference in a validation
                     schema.
        :type name: :class:`str`
        :param definition: The definition.
        :type definition: any :term:`mapping` """
        self._storage[name] = self._expand_definition(definition)

    def all(self):
        """ Returns a :class:`dict` with all registered definitions mapped to
        their name. """
        return self._storage

    def clear(self):
        """ Purge all definitions in the registry. """
        self._storage.clear()

    def extend(self, definitions):
        """ Add several definitions at once. Existing definitions are
        replaced silently.

        :param definitions: The names and definitions.
        :type definitions: a :term:`mapping` or an :term:`iterable` with
                           two-value :class:`tuple` s """
        for name, definition in dict(definitions).items():
            self.add(name, definition)

    def get(self, name, default=None):
        """ Retrieve a definition from the registry.

        :param name: The reference that points to the definition.
        :type name: :class:`str`
        :param default: Return value if the reference isn't registered. """
        return self._storage.get(name, default)

    def remove(self, *names):
        """ Unregister definitions from the registry.

        :param names: The names of the definitions that are to be
                      unregistered. """
        for name in names:
            self._storage.pop(name, None)


class SchemaRegistry(Registry):
    @classmethod
    def _expand_definition(cls, definition):
        return DefinitionSchema.expand(definition)


class RulesSetRegistry(Registry):
    @classmethod
    def _expand_definition(cls, definition):
        return DefinitionSchema.expand({0: definition})[0]


schema_registry, rules_set_registry = SchemaRegistry(), RulesSetRegistry()
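A short usage sketch (not part of the diff) for the two module-level registries defined above; both names are re-exported by the `cerberus` package, and string references in a schema are resolved against them:
```
from cerberus import Validator, schema_registry, rules_set_registry

schema_registry.add('address', {'street': {'type': 'string'}})
rules_set_registry.add('short_string', {'type': 'string', 'maxlength': 10})

# 'address' resolves via SchemaRegistry, 'short_string' via RulesSetRegistry.
v = Validator({'home': {'type': 'dict', 'schema': 'address'}, 'nick': 'short_string'})
assert v.validate({'home': {'street': 'Main St'}, 'nick': 'Ada'})
```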
159
Lib/site-packages/pipenv/vendor/cerberus/tests/__init__.py
vendored
Normal file
@@ -0,0 +1,159 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import re
|
||||
|
||||
import pytest
|
||||
|
||||
from cerberus import errors, Validator, SchemaError, DocumentError
|
||||
from cerberus.tests.conftest import sample_schema
|
||||
|
||||
|
||||
def assert_exception(exception, document={}, schema=None, validator=None, msg=None):
|
||||
""" Tests whether a specific exception is raised. Optionally also tests
|
||||
whether the exception message is as expected. """
|
||||
if validator is None:
|
||||
validator = Validator()
|
||||
if msg is None:
|
||||
with pytest.raises(exception):
|
||||
validator(document, schema)
|
||||
else:
|
||||
with pytest.raises(exception, match=re.escape(msg)):
|
||||
validator(document, schema)
|
||||
|
||||
|
||||
def assert_schema_error(*args):
|
||||
""" Tests whether a validation raises an exception due to a malformed
|
||||
schema. """
|
||||
assert_exception(SchemaError, *args)
|
||||
|
||||
|
||||
def assert_document_error(*args):
|
||||
""" Tests whether a validation raises an exception due to a malformed
|
||||
document. """
|
||||
assert_exception(DocumentError, *args)
|
||||
|
||||
|
||||
def assert_fail(
|
||||
document,
|
||||
schema=None,
|
||||
validator=None,
|
||||
update=False,
|
||||
error=None,
|
||||
errors=None,
|
||||
child_errors=None,
|
||||
):
|
||||
""" Tests whether a validation fails. """
|
||||
if validator is None:
|
||||
validator = Validator(sample_schema)
|
||||
result = validator(document, schema, update)
|
||||
assert isinstance(result, bool)
|
||||
assert not result
|
||||
|
||||
actual_errors = validator._errors
|
||||
|
||||
assert not (error is not None and errors is not None)
|
||||
assert not (errors is not None and child_errors is not None), (
|
||||
'child_errors can only be tested in ' 'conjunction with the error parameter'
|
||||
)
|
||||
assert not (child_errors is not None and error is None)
|
||||
if error is not None:
|
||||
assert len(actual_errors) == 1
|
||||
assert_has_error(actual_errors, *error)
|
||||
|
||||
if child_errors is not None:
|
||||
assert len(actual_errors[0].child_errors) == len(child_errors)
|
||||
assert_has_errors(actual_errors[0].child_errors, child_errors)
|
||||
|
||||
elif errors is not None:
|
||||
assert len(actual_errors) == len(errors)
|
||||
assert_has_errors(actual_errors, errors)
|
||||
|
||||
return actual_errors
|
||||
|
||||
|
||||
def assert_success(document, schema=None, validator=None, update=False):
|
||||
""" Tests whether a validation succeeds. """
|
||||
if validator is None:
|
||||
validator = Validator(sample_schema)
|
||||
result = validator(document, schema, update)
|
||||
assert isinstance(result, bool)
|
||||
if not result:
|
||||
raise AssertionError(validator.errors)
|
||||
|
||||
|
||||
def assert_has_error(_errors, d_path, s_path, error_def, constraint, info=()):
|
||||
if not isinstance(d_path, tuple):
|
||||
d_path = (d_path,)
|
||||
if not isinstance(info, tuple):
|
||||
info = (info,)
|
||||
|
||||
assert isinstance(_errors, errors.ErrorList)
|
||||
|
||||
for i, error in enumerate(_errors):
|
||||
assert isinstance(error, errors.ValidationError)
|
||||
try:
|
||||
assert error.document_path == d_path
|
||||
assert error.schema_path == s_path
|
||||
assert error.code == error_def.code
|
||||
assert error.rule == error_def.rule
|
||||
assert error.constraint == constraint
|
||||
if not error.is_group_error:
|
||||
assert error.info == info
|
||||
except AssertionError:
|
||||
pass
|
||||
except Exception:
|
||||
raise
|
||||
else:
|
||||
break
|
||||
else:
|
||||
raise AssertionError(
|
||||
"""
|
||||
Error with properties:
|
||||
document_path={doc_path}
|
||||
schema_path={schema_path}
|
||||
code={code}
|
||||
constraint={constraint}
|
||||
info={info}
|
||||
not found in errors:
|
||||
{errors}
|
||||
""".format(
|
||||
doc_path=d_path,
|
||||
schema_path=s_path,
|
||||
                code=hex(error_def.code),
                info=info,
                constraint=constraint,
                errors=_errors,
            )
        )
    return i


def assert_has_errors(_errors, _exp_errors):
    assert isinstance(_exp_errors, list)
    for error in _exp_errors:
        assert isinstance(error, tuple)
        assert_has_error(_errors, *error)


def assert_not_has_error(_errors, *args, **kwargs):
    try:
        assert_has_error(_errors, *args, **kwargs)
    except AssertionError:
        pass
    except Exception as e:
        raise e
    else:
        raise AssertionError('An unexpected error occurred.')


def assert_bad_type(field, data_type, value):
    assert_fail(
        {field: value}, error=(field, (field, 'type'), errors.BAD_TYPE, data_type)
    )


def assert_normalized(document, expected, schema=None, validator=None):
    if validator is None:
        validator = Validator(sample_schema)
    assert_success(document, schema, validator)
    assert validator.document == expected
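A hedged example (not part of the diff) of how the helpers above are meant to be called; the `error` argument is a tuple of (document_path, schema_path, error_definition, constraint[, info]):
```
from cerberus import errors
from cerberus.tests import assert_fail

# `an_integer` has `min: 1` in sample_schema, so 0 must fail with MIN_VALUE.
assert_fail(
    {'an_integer': 0},
    error=('an_integer', ('an_integer', 'min'), errors.MIN_VALUE, 1),
)
```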
BIN
Lib/site-packages/pipenv/vendor/cerberus/tests/__pycache__/__init__.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/tests/__pycache__/conftest.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/tests/__pycache__/test_assorted.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/tests/__pycache__/test_customization.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/tests/__pycache__/test_errors.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/tests/__pycache__/test_legacy.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/tests/__pycache__/test_normalization.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/tests/__pycache__/test_registries.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/tests/__pycache__/test_schema.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/tests/__pycache__/test_utils.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/cerberus/tests/__pycache__/test_validation.cpython-38.pyc
vendored
Normal file
Binary file not shown.
81
Lib/site-packages/pipenv/vendor/cerberus/tests/conftest.py
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from copy import deepcopy
|
||||
|
||||
import pytest
|
||||
|
||||
from cerberus import Validator
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def document():
|
||||
return deepcopy(sample_document)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def schema():
|
||||
return deepcopy(sample_schema)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def validator():
|
||||
return Validator(sample_schema)
|
||||
|
||||
|
||||
sample_schema = {
|
||||
'a_string': {'type': 'string', 'minlength': 2, 'maxlength': 10},
|
||||
'a_binary': {'type': 'binary', 'minlength': 2, 'maxlength': 10},
|
||||
'a_nullable_integer': {'type': 'integer', 'nullable': True},
|
||||
'an_integer': {'type': 'integer', 'min': 1, 'max': 100},
|
||||
'a_restricted_integer': {'type': 'integer', 'allowed': [-1, 0, 1]},
|
||||
'a_boolean': {'type': 'boolean', 'meta': 'can haz two distinct states'},
|
||||
'a_datetime': {'type': 'datetime', 'meta': {'format': '%a, %d. %b %Y'}},
|
||||
'a_float': {'type': 'float', 'min': 1, 'max': 100},
|
||||
'a_number': {'type': 'number', 'min': 1, 'max': 100},
|
||||
'a_set': {'type': 'set'},
|
||||
'one_or_more_strings': {'type': ['string', 'list'], 'schema': {'type': 'string'}},
|
||||
'a_regex_email': {
|
||||
'type': 'string',
|
||||
'regex': r'^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$',
|
||||
},
|
||||
'a_readonly_string': {'type': 'string', 'readonly': True},
|
||||
'a_restricted_string': {'type': 'string', 'allowed': ['agent', 'client', 'vendor']},
|
||||
'an_array': {'type': 'list', 'allowed': ['agent', 'client', 'vendor']},
|
||||
'an_array_from_set': {
|
||||
'type': 'list',
|
||||
'allowed': set(['agent', 'client', 'vendor']),
|
||||
},
|
||||
'a_list_of_dicts': {
|
||||
'type': 'list',
|
||||
'schema': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'sku': {'type': 'string'},
|
||||
'price': {'type': 'integer', 'required': True},
|
||||
},
|
||||
},
|
||||
},
|
||||
'a_list_of_values': {
|
||||
'type': 'list',
|
||||
'items': [{'type': 'string'}, {'type': 'integer'}],
|
||||
},
|
||||
'a_list_of_integers': {'type': 'list', 'schema': {'type': 'integer'}},
|
||||
'a_dict': {
|
||||
'type': 'dict',
|
||||
'schema': {
|
||||
'address': {'type': 'string'},
|
||||
'city': {'type': 'string', 'required': True},
|
||||
},
|
||||
},
|
||||
'a_dict_with_valuesrules': {'type': 'dict', 'valuesrules': {'type': 'integer'}},
|
||||
'a_list_length': {
|
||||
'type': 'list',
|
||||
'schema': {'type': 'integer'},
|
||||
'minlength': 2,
|
||||
'maxlength': 5,
|
||||
},
|
||||
'a_nullable_field_without_type': {'nullable': True},
|
||||
'a_not_nullable_field_without_type': {},
|
||||
}
|
||||
|
||||
sample_document = {'name': 'john doe'}
|
||||
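A small sketch (not part of the diff) showing the sample objects above in use, the same way the `validator` fixture hands them to each test:
```
from cerberus import Validator
from cerberus.tests.conftest import sample_schema

v = Validator(sample_schema)
assert v.validate({'an_integer': 42, 'a_restricted_string': 'agent'})
assert not v.validate({'an_integer': 999})  # max is 100
```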
111
Lib/site-packages/pipenv/vendor/cerberus/tests/test_assorted.py
vendored
Normal file
@@ -0,0 +1,111 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from decimal import Decimal
|
||||
from pkg_resources import Distribution, DistributionNotFound
|
||||
|
||||
from pytest import mark
|
||||
|
||||
from cerberus import TypeDefinition, Validator
|
||||
from cerberus.tests import assert_fail, assert_success
|
||||
from cerberus.utils import validator_factory
|
||||
from cerberus.validator import BareValidator
|
||||
from cerberus.platform import PYTHON_VERSION
|
||||
|
||||
|
||||
if PYTHON_VERSION > 3 and PYTHON_VERSION < 3.4:
|
||||
from imp import reload
|
||||
elif PYTHON_VERSION >= 3.4:
|
||||
from importlib import reload
|
||||
else:
|
||||
pass # Python 2.x
|
||||
|
||||
|
||||
def test_pkgresources_version(monkeypatch):
|
||||
def create_fake_distribution(name):
|
||||
return Distribution(project_name="cerberus", version="1.2.3")
|
||||
|
||||
with monkeypatch.context() as m:
|
||||
cerberus = __import__("cerberus")
|
||||
m.setattr("pkg_resources.get_distribution", create_fake_distribution)
|
||||
reload(cerberus)
|
||||
assert cerberus.__version__ == "1.2.3"
|
||||
|
||||
|
||||
def test_version_not_found(monkeypatch):
|
||||
def raise_distribution_not_found(name):
|
||||
raise DistributionNotFound("pkg_resources cannot get distribution")
|
||||
|
||||
with monkeypatch.context() as m:
|
||||
cerberus = __import__("cerberus")
|
||||
m.setattr("pkg_resources.get_distribution", raise_distribution_not_found)
|
||||
reload(cerberus)
|
||||
assert cerberus.__version__ == "unknown"
|
||||
|
||||
|
||||
def test_clear_cache(validator):
|
||||
assert len(validator._valid_schemas) > 0
|
||||
validator.clear_caches()
|
||||
assert len(validator._valid_schemas) == 0
|
||||
|
||||
|
||||
def test_docstring(validator):
|
||||
assert validator.__doc__
|
||||
|
||||
|
||||
# Test that testing with the sample schema works as expected
|
||||
# as there might be rules with side-effects in it
|
||||
|
||||
|
||||
@mark.parametrize(
|
||||
"test,document",
|
||||
((assert_fail, {"an_integer": 60}), (assert_success, {"an_integer": 110})),
|
||||
)
|
||||
def test_that_test_fails(test, document):
|
||||
try:
|
||||
test(document)
|
||||
except AssertionError:
|
||||
pass
|
||||
else:
|
||||
raise AssertionError("test didn't fail")
|
||||
|
||||
|
||||
def test_dynamic_types():
|
||||
decimal_type = TypeDefinition("decimal", (Decimal,), ())
|
||||
document = {"measurement": Decimal(0)}
|
||||
schema = {"measurement": {"type": "decimal"}}
|
||||
|
||||
validator = Validator()
|
||||
validator.types_mapping["decimal"] = decimal_type
|
||||
assert_success(document, schema, validator)
|
||||
|
||||
class MyValidator(Validator):
|
||||
types_mapping = Validator.types_mapping.copy()
|
||||
types_mapping["decimal"] = decimal_type
|
||||
|
||||
validator = MyValidator()
|
||||
assert_success(document, schema, validator)
|
||||
|
||||
|
||||
def test_mro():
|
||||
assert Validator.__mro__ == (Validator, BareValidator, object), Validator.__mro__
|
||||
|
||||
|
||||
def test_mixin_init():
|
||||
class Mixin(object):
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs["test"] = True
|
||||
super(Mixin, self).__init__(*args, **kwargs)
|
||||
|
||||
MyValidator = validator_factory("MyValidator", Mixin)
|
||||
validator = MyValidator()
|
||||
assert validator._config["test"]
|
||||
|
||||
|
||||
def test_sub_init():
|
||||
class MyValidator(Validator):
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs["test"] = True
|
||||
super(MyValidator, self).__init__(*args, **kwargs)
|
||||
|
||||
validator = MyValidator()
|
||||
assert validator._config["test"]
|
||||
100
Lib/site-packages/pipenv/vendor/cerberus/tests/test_customization.py
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from pytest import mark
|
||||
|
||||
import cerberus
|
||||
from cerberus.tests import assert_fail, assert_success
|
||||
from cerberus.tests.conftest import sample_schema
|
||||
|
||||
|
||||
def test_contextual_data_preservation():
|
||||
class InheritedValidator(cerberus.Validator):
|
||||
def __init__(self, *args, **kwargs):
|
||||
if 'working_dir' in kwargs:
|
||||
self.working_dir = kwargs['working_dir']
|
||||
super(InheritedValidator, self).__init__(*args, **kwargs)
|
||||
|
||||
def _validate_type_test(self, value):
|
||||
if self.working_dir:
|
||||
return True
|
||||
|
||||
assert 'test' in InheritedValidator.types
|
||||
v = InheritedValidator(
|
||||
{'test': {'type': 'list', 'schema': {'type': 'test'}}}, working_dir='/tmp'
|
||||
)
|
||||
assert_success({'test': ['foo']}, validator=v)
|
||||
|
||||
|
||||
def test_docstring_parsing():
|
||||
class CustomValidator(cerberus.Validator):
|
||||
def _validate_foo(self, argument, field, value):
|
||||
""" {'type': 'zap'} """
|
||||
pass
|
||||
|
||||
def _validate_bar(self, value):
|
||||
""" Test the barreness of a value.
|
||||
|
||||
The rule's arguments are validated against this schema:
|
||||
{'type': 'boolean'}
|
||||
"""
|
||||
pass
|
||||
|
||||
assert 'foo' in CustomValidator.validation_rules
|
||||
assert 'bar' in CustomValidator.validation_rules
|
||||
|
||||
|
||||
# TODO remove 'validator' as rule parameter with the next major release
|
||||
@mark.parametrize('rule', ('check_with', 'validator'))
|
||||
def test_check_with_method(rule):
|
||||
# https://github.com/pyeve/cerberus/issues/265
|
||||
class MyValidator(cerberus.Validator):
|
||||
def _check_with_oddity(self, field, value):
|
||||
if not value & 1:
|
||||
self._error(field, "Must be an odd number")
|
||||
|
||||
v = MyValidator(schema={'amount': {rule: 'oddity'}})
|
||||
assert_success(document={'amount': 1}, validator=v)
|
||||
assert_fail(
|
||||
document={'amount': 2},
|
||||
validator=v,
|
||||
error=('amount', (), cerberus.errors.CUSTOM, None, ('Must be an odd number',)),
|
||||
)
|
||||
|
||||
|
||||
# TODO remove test with the next major release
|
||||
@mark.parametrize('rule', ('check_with', 'validator'))
|
||||
def test_validator_method(rule):
|
||||
class MyValidator(cerberus.Validator):
|
||||
def _validator_oddity(self, field, value):
|
||||
if not value & 1:
|
||||
self._error(field, "Must be an odd number")
|
||||
|
||||
v = MyValidator(schema={'amount': {rule: 'oddity'}})
|
||||
assert_success(document={'amount': 1}, validator=v)
|
||||
assert_fail(
|
||||
document={'amount': 2},
|
||||
validator=v,
|
||||
error=('amount', (), cerberus.errors.CUSTOM, None, ('Must be an odd number',)),
|
||||
)
|
||||
|
||||
|
||||
def test_schema_validation_can_be_disabled_in_schema_setter():
|
||||
class NonvalidatingValidator(cerberus.Validator):
|
||||
"""
|
||||
Skips schema validation to speed up initialization
|
||||
"""
|
||||
|
||||
@cerberus.Validator.schema.setter
|
||||
def schema(self, schema):
|
||||
if schema is None:
|
||||
self._schema = None
|
||||
elif self.is_child:
|
||||
self._schema = schema
|
||||
elif isinstance(schema, cerberus.schema.DefinitionSchema):
|
||||
self._schema = schema
|
||||
else:
|
||||
self._schema = cerberus.schema.UnvalidatedSchema(schema)
|
||||
|
||||
v = NonvalidatingValidator(schema=sample_schema)
|
||||
assert v.validate(document={'an_integer': 1})
|
||||
assert not v.validate(document={'an_integer': 'a'})
|
||||
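For contrast with the `_check_with_oddity` method used in the tests above, a hedged sketch (not part of the diff): `check_with` also accepts a plain callable taking (field, value, error):
```
from cerberus import Validator

def oddity(field, value, error):
    if not value & 1:
        error(field, "Must be an odd number")

v = Validator({'amount': {'check_with': oddity}})
assert v.validate({'amount': 1})
assert not v.validate({'amount': 2})
```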
336
Lib/site-packages/pipenv/vendor/cerberus/tests/test_errors.py
vendored
Normal file
@@ -0,0 +1,336 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from cerberus import Validator, errors
|
||||
from cerberus.tests import assert_fail
|
||||
|
||||
|
||||
ValidationError = errors.ValidationError
|
||||
|
||||
|
||||
def test__error_1():
|
||||
v = Validator(schema={'foo': {'type': 'string'}})
|
||||
v.document = {'foo': 42}
|
||||
v._error('foo', errors.BAD_TYPE, 'string')
|
||||
error = v._errors[0]
|
||||
assert error.document_path == ('foo',)
|
||||
assert error.schema_path == ('foo', 'type')
|
||||
assert error.code == 0x24
|
||||
assert error.rule == 'type'
|
||||
assert error.constraint == 'string'
|
||||
assert error.value == 42
|
||||
assert error.info == ('string',)
|
||||
assert not error.is_group_error
|
||||
assert not error.is_logic_error
|
||||
|
||||
|
||||
def test__error_2():
|
||||
v = Validator(schema={'foo': {'keysrules': {'type': 'integer'}}})
|
||||
v.document = {'foo': {'0': 'bar'}}
|
||||
v._error('foo', errors.KEYSRULES, ())
|
||||
error = v._errors[0]
|
||||
assert error.document_path == ('foo',)
|
||||
assert error.schema_path == ('foo', 'keysrules')
|
||||
assert error.code == 0x83
|
||||
assert error.rule == 'keysrules'
|
||||
assert error.constraint == {'type': 'integer'}
|
||||
assert error.value == {'0': 'bar'}
|
||||
assert error.info == ((),)
|
||||
assert error.is_group_error
|
||||
assert not error.is_logic_error
|
||||
|
||||
|
||||
def test__error_3():
|
||||
valids = [
|
||||
{'type': 'string', 'regex': '0x[0-9a-f]{2}'},
|
||||
{'type': 'integer', 'min': 0, 'max': 255},
|
||||
]
|
||||
v = Validator(schema={'foo': {'oneof': valids}})
|
||||
v.document = {'foo': '0x100'}
|
||||
v._error('foo', errors.ONEOF, (), 0, 2)
|
||||
error = v._errors[0]
|
||||
assert error.document_path == ('foo',)
|
||||
assert error.schema_path == ('foo', 'oneof')
|
||||
assert error.code == 0x92
|
||||
assert error.rule == 'oneof'
|
||||
assert error.constraint == valids
|
||||
assert error.value == '0x100'
|
||||
assert error.info == ((), 0, 2)
|
||||
assert error.is_group_error
|
||||
assert error.is_logic_error
|
||||
|
||||
|
||||
def test_error_tree_from_subschema(validator):
|
||||
schema = {'foo': {'schema': {'bar': {'type': 'string'}}}}
|
||||
document = {'foo': {'bar': 0}}
|
||||
assert_fail(document, schema, validator=validator)
|
||||
d_error_tree = validator.document_error_tree
|
||||
s_error_tree = validator.schema_error_tree
|
||||
|
||||
assert 'foo' in d_error_tree
|
||||
|
||||
assert len(d_error_tree['foo'].errors) == 1, d_error_tree['foo']
|
||||
assert d_error_tree['foo'].errors[0].code == errors.MAPPING_SCHEMA.code
|
||||
assert 'bar' in d_error_tree['foo']
|
||||
assert d_error_tree['foo']['bar'].errors[0].value == 0
|
||||
assert d_error_tree.fetch_errors_from(('foo', 'bar'))[0].value == 0
|
||||
|
||||
assert 'foo' in s_error_tree
|
||||
assert 'schema' in s_error_tree['foo']
|
||||
assert 'bar' in s_error_tree['foo']['schema']
|
||||
assert 'type' in s_error_tree['foo']['schema']['bar']
|
||||
assert s_error_tree['foo']['schema']['bar']['type'].errors[0].value == 0
|
||||
assert (
|
||||
s_error_tree.fetch_errors_from(('foo', 'schema', 'bar', 'type'))[0].value == 0
|
||||
)
|
||||
|
||||
|
||||
def test_error_tree_from_anyof(validator):
|
||||
schema = {'foo': {'anyof': [{'type': 'string'}, {'type': 'integer'}]}}
|
||||
document = {'foo': []}
|
||||
assert_fail(document, schema, validator=validator)
|
||||
d_error_tree = validator.document_error_tree
|
||||
s_error_tree = validator.schema_error_tree
|
||||
assert 'foo' in d_error_tree
|
||||
assert d_error_tree['foo'].errors[0].value == []
|
||||
assert 'foo' in s_error_tree
|
||||
assert 'anyof' in s_error_tree['foo']
|
||||
assert 0 in s_error_tree['foo']['anyof']
|
||||
assert 1 in s_error_tree['foo']['anyof']
|
||||
assert 'type' in s_error_tree['foo']['anyof'][0]
|
||||
assert s_error_tree['foo']['anyof'][0]['type'].errors[0].value == []
|
||||
|
||||
|
||||
def test_nested_error_paths(validator):
|
||||
schema = {
|
||||
'a_dict': {
|
||||
'keysrules': {'type': 'integer'},
|
||||
'valuesrules': {'regex': '[a-z]*'},
|
||||
},
|
||||
'a_list': {'schema': {'type': 'string', 'oneof_regex': ['[a-z]*$', '[A-Z]*']}},
|
||||
}
|
||||
document = {
|
||||
'a_dict': {0: 'abc', 'one': 'abc', 2: 'aBc', 'three': 'abC'},
|
||||
'a_list': [0, 'abc', 'abC'],
|
||||
}
|
||||
assert_fail(document, schema, validator=validator)
|
||||
|
||||
_det = validator.document_error_tree
|
||||
_set = validator.schema_error_tree
|
||||
|
||||
assert len(_det.errors) == 0
|
||||
assert len(_set.errors) == 0
|
||||
|
||||
assert len(_det['a_dict'].errors) == 2
|
||||
assert len(_set['a_dict'].errors) == 0
|
||||
|
||||
assert _det['a_dict'][0] is None
|
||||
assert len(_det['a_dict']['one'].errors) == 1
|
||||
assert len(_det['a_dict'][2].errors) == 1
|
||||
assert len(_det['a_dict']['three'].errors) == 2
|
||||
|
||||
assert len(_set['a_dict']['keysrules'].errors) == 1
|
||||
assert len(_set['a_dict']['valuesrules'].errors) == 1
|
||||
|
||||
assert len(_set['a_dict']['keysrules']['type'].errors) == 2
|
||||
assert len(_set['a_dict']['valuesrules']['regex'].errors) == 2
|
||||
|
||||
_ref_err = ValidationError(
|
||||
('a_dict', 'one'),
|
||||
('a_dict', 'keysrules', 'type'),
|
||||
errors.BAD_TYPE.code,
|
||||
'type',
|
||||
'integer',
|
||||
'one',
|
||||
(),
|
||||
)
|
||||
assert _det['a_dict']['one'].errors[0] == _ref_err
|
||||
assert _set['a_dict']['keysrules']['type'].errors[0] == _ref_err
|
||||
|
||||
_ref_err = ValidationError(
|
||||
('a_dict', 2),
|
||||
('a_dict', 'valuesrules', 'regex'),
|
||||
errors.REGEX_MISMATCH.code,
|
||||
'regex',
|
||||
'[a-z]*$',
|
||||
'aBc',
|
||||
(),
|
||||
)
|
||||
assert _det['a_dict'][2].errors[0] == _ref_err
|
||||
assert _set['a_dict']['valuesrules']['regex'].errors[0] == _ref_err
|
||||
|
||||
_ref_err = ValidationError(
|
||||
('a_dict', 'three'),
|
||||
('a_dict', 'keysrules', 'type'),
|
||||
errors.BAD_TYPE.code,
|
||||
'type',
|
||||
'integer',
|
||||
'three',
|
||||
(),
|
||||
)
|
||||
assert _det['a_dict']['three'].errors[0] == _ref_err
|
||||
assert _set['a_dict']['keysrules']['type'].errors[1] == _ref_err
|
||||
|
||||
_ref_err = ValidationError(
|
||||
('a_dict', 'three'),
|
||||
('a_dict', 'valuesrules', 'regex'),
|
||||
errors.REGEX_MISMATCH.code,
|
||||
'regex',
|
||||
'[a-z]*$',
|
||||
'abC',
|
||||
(),
|
||||
)
|
||||
assert _det['a_dict']['three'].errors[1] == _ref_err
|
||||
assert _set['a_dict']['valuesrules']['regex'].errors[1] == _ref_err
|
||||
|
||||
assert len(_det['a_list'].errors) == 1
|
||||
assert len(_det['a_list'][0].errors) == 1
|
||||
assert _det['a_list'][1] is None
|
||||
assert len(_det['a_list'][2].errors) == 3
|
||||
assert len(_set['a_list'].errors) == 0
|
||||
assert len(_set['a_list']['schema'].errors) == 1
|
||||
assert len(_set['a_list']['schema']['type'].errors) == 1
|
||||
assert len(_set['a_list']['schema']['oneof'][0]['regex'].errors) == 1
|
||||
assert len(_set['a_list']['schema']['oneof'][1]['regex'].errors) == 1
|
||||
|
||||
_ref_err = ValidationError(
|
||||
('a_list', 0),
|
||||
('a_list', 'schema', 'type'),
|
||||
errors.BAD_TYPE.code,
|
||||
'type',
|
||||
'string',
|
||||
0,
|
||||
(),
|
||||
)
|
||||
assert _det['a_list'][0].errors[0] == _ref_err
|
||||
assert _set['a_list']['schema']['type'].errors[0] == _ref_err
|
||||
|
||||
_ref_err = ValidationError(
|
||||
('a_list', 2),
|
||||
('a_list', 'schema', 'oneof'),
|
||||
errors.ONEOF.code,
|
||||
'oneof',
|
||||
'irrelevant_at_this_point',
|
||||
'abC',
|
||||
(),
|
||||
)
|
||||
assert _det['a_list'][2].errors[0] == _ref_err
|
||||
assert _set['a_list']['schema']['oneof'].errors[0] == _ref_err
|
||||
|
||||
_ref_err = ValidationError(
|
||||
('a_list', 2),
|
||||
('a_list', 'schema', 'oneof', 0, 'regex'),
|
||||
errors.REGEX_MISMATCH.code,
|
||||
'regex',
|
||||
'[a-z]*$',
|
||||
'abC',
|
||||
(),
|
||||
)
|
||||
assert _det['a_list'][2].errors[1] == _ref_err
|
||||
assert _set['a_list']['schema']['oneof'][0]['regex'].errors[0] == _ref_err
|
||||
|
||||
_ref_err = ValidationError(
|
||||
('a_list', 2),
|
||||
('a_list', 'schema', 'oneof', 1, 'regex'),
|
||||
errors.REGEX_MISMATCH.code,
|
||||
'regex',
|
||||
'[a-z]*$',
|
||||
'abC',
|
||||
(),
|
||||
)
|
||||
assert _det['a_list'][2].errors[2] == _ref_err
|
||||
assert _set['a_list']['schema']['oneof'][1]['regex'].errors[0] == _ref_err
|
||||
|
||||
|
||||
def test_queries():
|
||||
schema = {'foo': {'type': 'dict', 'schema': {'bar': {'type': 'number'}}}}
|
||||
document = {'foo': {'bar': 'zero'}}
|
||||
validator = Validator(schema)
|
||||
validator(document)
|
||||
|
||||
assert 'foo' in validator.document_error_tree
|
||||
assert 'bar' in validator.document_error_tree['foo']
|
||||
assert 'foo' in validator.schema_error_tree
|
||||
assert 'schema' in validator.schema_error_tree['foo']
|
||||
|
||||
assert errors.MAPPING_SCHEMA in validator.document_error_tree['foo'].errors
|
||||
assert errors.MAPPING_SCHEMA in validator.document_error_tree['foo']
|
||||
assert errors.BAD_TYPE in validator.document_error_tree['foo']['bar']
|
||||
assert errors.MAPPING_SCHEMA in validator.schema_error_tree['foo']['schema']
|
||||
assert (
|
||||
errors.BAD_TYPE in validator.schema_error_tree['foo']['schema']['bar']['type']
|
||||
)
|
||||
|
||||
assert (
|
||||
validator.document_error_tree['foo'][errors.MAPPING_SCHEMA].child_errors[0].code
|
||||
== errors.BAD_TYPE.code
|
||||
)
|
||||
|
||||
|
||||
def test_basic_error_handler():
|
||||
handler = errors.BasicErrorHandler()
|
||||
_errors, ref = [], {}
|
||||
|
||||
_errors.append(ValidationError(['foo'], ['foo'], 0x63, 'readonly', True, None, ()))
|
||||
ref.update({'foo': [handler.messages[0x63]]})
|
||||
assert handler(_errors) == ref
|
||||
|
||||
_errors.append(ValidationError(['bar'], ['foo'], 0x42, 'min', 1, 2, ()))
|
||||
ref.update({'bar': [handler.messages[0x42].format(constraint=1)]})
|
||||
assert handler(_errors) == ref
|
||||
|
||||
_errors.append(
|
||||
ValidationError(
|
||||
['zap', 'foo'], ['zap', 'schema', 'foo'], 0x24, 'type', 'string', True, ()
|
||||
)
|
||||
)
|
||||
ref.update({'zap': [{'foo': [handler.messages[0x24].format(constraint='string')]}]})
|
||||
assert handler(_errors) == ref
|
||||
|
||||
_errors.append(
|
||||
ValidationError(
|
||||
['zap', 'foo'],
|
||||
['zap', 'schema', 'foo'],
|
||||
0x41,
|
||||
'regex',
|
||||
'^p[äe]ng$',
|
||||
'boom',
|
||||
(),
|
||||
)
|
||||
)
|
||||
ref['zap'][0]['foo'].append(handler.messages[0x41].format(constraint='^p[äe]ng$'))
|
||||
assert handler(_errors) == ref
|
||||
|
||||
|
||||
def test_basic_error_of_errors(validator):
|
||||
schema = {'foo': {'oneof': [{'type': 'integer'}, {'type': 'string'}]}}
|
||||
document = {'foo': 23.42}
|
||||
error = ('foo', ('foo', 'oneof'), errors.ONEOF, schema['foo']['oneof'], ())
|
||||
child_errors = [
|
||||
(error[0], error[1] + (0, 'type'), errors.BAD_TYPE, 'integer'),
|
||||
(error[0], error[1] + (1, 'type'), errors.BAD_TYPE, 'string'),
|
||||
]
|
||||
assert_fail(
|
||||
document, schema, validator=validator, error=error, child_errors=child_errors
|
||||
)
|
||||
assert validator.errors == {
|
||||
'foo': [
|
||||
errors.BasicErrorHandler.messages[0x92],
|
||||
{
|
||||
'oneof definition 0': ['must be of integer type'],
|
||||
'oneof definition 1': ['must be of string type'],
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
def test_wrong_amount_of_items(validator):
|
||||
# https://github.com/pyeve/cerberus/issues/505
|
||||
validator.schema = {
|
||||
'test_list': {
|
||||
'type': 'list',
|
||||
'required': True,
|
||||
'items': [{'type': 'string'}, {'type': 'string'}],
|
||||
}
|
||||
}
|
||||
validator({'test_list': ['test']})
|
||||
assert validator.errors == {'test_list': ["length of list should be 2, it is 1"]}
|
||||
3
Lib/site-packages/pipenv/vendor/cerberus/tests/test_legacy.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
pass
|
||||
543
Lib/site-packages/pipenv/vendor/cerberus/tests/test_normalization.py
vendored
Normal file
@@ -0,0 +1,543 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from copy import deepcopy
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
from pytest import mark
|
||||
|
||||
from cerberus import Validator, errors
|
||||
from cerberus.tests import (
|
||||
assert_fail,
|
||||
assert_has_error,
|
||||
assert_normalized,
|
||||
assert_success,
|
||||
)
|
||||
|
||||
|
||||
def must_not_be_called(*args, **kwargs):
|
||||
raise RuntimeError('This shall not be called.')
|
||||
|
||||
|
||||
def test_coerce():
|
||||
schema = {'amount': {'coerce': int}}
|
||||
document = {'amount': '1'}
|
||||
expected = {'amount': 1}
|
||||
assert_normalized(document, expected, schema)
|
||||
|
||||
|
||||
def test_coerce_in_dictschema():
|
||||
schema = {'thing': {'type': 'dict', 'schema': {'amount': {'coerce': int}}}}
|
||||
document = {'thing': {'amount': '2'}}
|
||||
expected = {'thing': {'amount': 2}}
|
||||
assert_normalized(document, expected, schema)
|
||||
|
||||
|
||||
def test_coerce_in_listschema():
|
||||
schema = {'things': {'type': 'list', 'schema': {'coerce': int}}}
|
||||
document = {'things': ['1', '2', '3']}
|
||||
expected = {'things': [1, 2, 3]}
|
||||
assert_normalized(document, expected, schema)
|
||||
|
||||
|
||||
def test_coerce_in_listitems():
|
||||
schema = {'things': {'type': 'list', 'items': [{'coerce': int}, {'coerce': str}]}}
|
||||
document = {'things': ['1', 2]}
|
||||
expected = {'things': [1, '2']}
|
||||
assert_normalized(document, expected, schema)
|
||||
|
||||
validator = Validator(schema)
|
||||
document['things'].append(3)
|
||||
assert not validator(document)
|
||||
assert validator.document['things'] == document['things']
|
||||
|
||||
|
||||
def test_coerce_in_dictschema_in_listschema():
|
||||
item_schema = {'type': 'dict', 'schema': {'amount': {'coerce': int}}}
|
||||
schema = {'things': {'type': 'list', 'schema': item_schema}}
|
||||
document = {'things': [{'amount': '2'}]}
|
||||
expected = {'things': [{'amount': 2}]}
|
||||
assert_normalized(document, expected, schema)
|
||||
|
||||
|
||||
def test_coerce_not_destructive():
|
||||
schema = {'amount': {'coerce': int}}
|
||||
v = Validator(schema)
|
||||
doc = {'amount': '1'}
|
||||
v.validate(doc)
|
||||
assert v.document is not doc
|
||||
|
||||
|
||||
def test_coerce_catches_ValueError():
|
||||
schema = {'amount': {'coerce': int}}
|
||||
_errors = assert_fail({'amount': 'not_a_number'}, schema)
|
||||
_errors[0].info = () # ignore exception message here
|
||||
assert_has_error(
|
||||
_errors, 'amount', ('amount', 'coerce'), errors.COERCION_FAILED, int
|
||||
)
|
||||
|
||||
|
||||
def test_coerce_in_listitems_catches_ValueError():
|
||||
schema = {'things': {'type': 'list', 'items': [{'coerce': int}, {'coerce': str}]}}
|
||||
document = {'things': ['not_a_number', 2]}
|
||||
_errors = assert_fail(document, schema)
|
||||
_errors[0].info = () # ignore exception message here
|
||||
assert_has_error(
|
||||
_errors,
|
||||
('things', 0),
|
||||
('things', 'items', 'coerce'),
|
||||
errors.COERCION_FAILED,
|
||||
int,
|
||||
)
|
||||
|
||||
|
||||
def test_coerce_catches_TypeError():
|
||||
schema = {'name': {'coerce': str.lower}}
|
||||
_errors = assert_fail({'name': 1234}, schema)
|
||||
_errors[0].info = () # ignore exception message here
|
||||
assert_has_error(
|
||||
_errors, 'name', ('name', 'coerce'), errors.COERCION_FAILED, str.lower
|
||||
)
|
||||
|
||||
|
||||
def test_coerce_in_listitems_catches_TypeError():
|
||||
schema = {
|
||||
'things': {'type': 'list', 'items': [{'coerce': int}, {'coerce': str.lower}]}
|
||||
}
|
||||
document = {'things': ['1', 2]}
|
||||
_errors = assert_fail(document, schema)
|
||||
_errors[0].info = () # ignore exception message here
|
||||
assert_has_error(
|
||||
_errors,
|
||||
('things', 1),
|
||||
('things', 'items', 'coerce'),
|
||||
errors.COERCION_FAILED,
|
||||
str.lower,
|
||||
)
|
||||
|
||||
|
||||
def test_coerce_unknown():
|
||||
schema = {'foo': {'schema': {}, 'allow_unknown': {'coerce': int}}}
|
||||
document = {'foo': {'bar': '0'}}
|
||||
expected = {'foo': {'bar': 0}}
|
||||
assert_normalized(document, expected, schema)
|
||||
|
||||
|
||||
def test_custom_coerce_and_rename():
|
||||
class MyNormalizer(Validator):
|
||||
def __init__(self, multiplier, *args, **kwargs):
|
||||
super(MyNormalizer, self).__init__(*args, **kwargs)
|
||||
self.multiplier = multiplier
|
||||
|
||||
def _normalize_coerce_multiply(self, value):
|
||||
return value * self.multiplier
|
||||
|
||||
v = MyNormalizer(2, {'foo': {'coerce': 'multiply'}})
|
||||
assert v.normalized({'foo': 2})['foo'] == 4
|
||||
|
||||
v = MyNormalizer(3, allow_unknown={'rename_handler': 'multiply'})
|
||||
assert v.normalized({3: None}) == {9: None}
|
||||
|
||||
|
||||
def test_coerce_chain():
|
||||
drop_prefix = lambda x: x[2:] # noqa: E731
|
||||
upper = lambda x: x.upper() # noqa: E731
|
||||
schema = {'foo': {'coerce': [hex, drop_prefix, upper]}}
|
||||
assert_normalized({'foo': 15}, {'foo': 'F'}, schema)
|
||||
|
||||
|
||||
def test_coerce_chain_aborts(validator):
|
||||
def dont_do_me(value):
|
||||
raise AssertionError('The coercion chain did not abort after an ' 'error.')
|
||||
|
||||
schema = {'foo': {'coerce': [hex, dont_do_me]}}
|
||||
validator({'foo': '0'}, schema)
|
||||
assert errors.COERCION_FAILED in validator._errors
|
||||
|
||||
|
||||
def test_coerce_non_digit_in_sequence(validator):
|
||||
# https://github.com/pyeve/cerberus/issues/211
|
||||
schema = {'data': {'type': 'list', 'schema': {'type': 'integer', 'coerce': int}}}
|
||||
document = {'data': ['q']}
|
||||
assert validator.validated(document, schema) is None
|
||||
assert (
|
||||
validator.validated(document, schema, always_return_document=True) == document
|
||||
) # noqa: W503
|
||||
|
||||
|
||||
def test_nullables_dont_fail_coerce():
|
||||
schema = {'foo': {'coerce': int, 'nullable': True, 'type': 'integer'}}
|
||||
document = {'foo': None}
|
||||
assert_normalized(document, document, schema)
|
||||
|
||||
|
||||
def test_nullables_fail_coerce_on_non_null_values(validator):
|
||||
def failing_coercion(value):
|
||||
raise Exception("expected to fail")
|
||||
|
||||
schema = {'foo': {'coerce': failing_coercion, 'nullable': True, 'type': 'integer'}}
|
||||
document = {'foo': None}
|
||||
assert_normalized(document, document, schema)
|
||||
|
||||
validator({'foo': 2}, schema)
|
||||
assert errors.COERCION_FAILED in validator._errors
|
||||
|
||||
|
||||
def test_normalized():
|
||||
schema = {'amount': {'coerce': int}}
|
||||
document = {'amount': '2'}
|
||||
expected = {'amount': 2}
|
||||
assert_normalized(document, expected, schema)
|
||||
|
||||
|
||||
def test_rename(validator):
|
||||
schema = {'foo': {'rename': 'bar'}}
|
||||
document = {'foo': 0}
|
||||
expected = {'bar': 0}
|
||||
    # We cannot use assert_normalized here since there is a bug where
    # Cerberus says that the renamed field is an unknown field:
    # {'bar': 'unknown field'}
    validator(document, schema, False)
    assert validator.document == expected


def test_rename_handler():
    validator = Validator(allow_unknown={'rename_handler': int})
    schema = {}
    document = {'0': 'foo'}
    expected = {0: 'foo'}
    assert_normalized(document, expected, schema, validator)


def test_purge_unknown():
    validator = Validator(purge_unknown=True)
    schema = {'foo': {'type': 'string'}}
    document = {'bar': 'foo'}
    expected = {}
    assert_normalized(document, expected, schema, validator)


def test_purge_unknown_in_subschema():
    schema = {
        'foo': {
            'type': 'dict',
            'schema': {'foo': {'type': 'string'}},
            'purge_unknown': True,
        }
    }
    document = {'foo': {'bar': ''}}
    expected = {'foo': {}}
    assert_normalized(document, expected, schema)


def test_issue_147_complex():
    schema = {'revision': {'coerce': int}}
    document = {'revision': '5', 'file': NamedTemporaryFile(mode='w+')}
    document['file'].write(r'foobar')
    document['file'].seek(0)
    normalized = Validator(schema, allow_unknown=True).normalized(document)
    assert normalized['revision'] == 5
    assert normalized['file'].read() == 'foobar'
    document['file'].close()
    normalized['file'].close()


def test_issue_147_nested_dict():
    schema = {'thing': {'type': 'dict', 'schema': {'amount': {'coerce': int}}}}
    ref_obj = '2'
    document = {'thing': {'amount': ref_obj}}
    normalized = Validator(schema).normalized(document)
    assert document is not normalized
    assert normalized['thing']['amount'] == 2
    assert ref_obj == '2'
    assert document['thing']['amount'] is ref_obj


def test_coerce_in_valuesrules():
    # https://github.com/pyeve/cerberus/issues/155
    schema = {
        'thing': {'type': 'dict', 'valuesrules': {'coerce': int, 'type': 'integer'}}
    }
    document = {'thing': {'amount': '2'}}
    expected = {'thing': {'amount': 2}}
    assert_normalized(document, expected, schema)


def test_coerce_in_keysrules():
    # https://github.com/pyeve/cerberus/issues/155
    schema = {
        'thing': {'type': 'dict', 'keysrules': {'coerce': int, 'type': 'integer'}}
    }
    document = {'thing': {'5': 'foo'}}
    expected = {'thing': {5: 'foo'}}
    assert_normalized(document, expected, schema)


def test_coercion_of_sequence_items(validator):
    # https://github.com/pyeve/cerberus/issues/161
    schema = {'a_list': {'type': 'list', 'schema': {'type': 'float', 'coerce': float}}}
    document = {'a_list': [3, 4, 5]}
    expected = {'a_list': [3.0, 4.0, 5.0]}
    assert_normalized(document, expected, schema, validator)
    for x in validator.document['a_list']:
        assert isinstance(x, float)


@mark.parametrize(
    'default', ({'default': 'bar_value'}, {'default_setter': lambda doc: 'bar_value'})
)
def test_default_missing(default):
    bar_schema = {'type': 'string'}
    bar_schema.update(default)
    schema = {'foo': {'type': 'string'}, 'bar': bar_schema}
    document = {'foo': 'foo_value'}
    expected = {'foo': 'foo_value', 'bar': 'bar_value'}
    assert_normalized(document, expected, schema)


@mark.parametrize(
    'default', ({'default': 'bar_value'}, {'default_setter': must_not_be_called})
)
def test_default_existent(default):
    bar_schema = {'type': 'string'}
    bar_schema.update(default)
    schema = {'foo': {'type': 'string'}, 'bar': bar_schema}
    document = {'foo': 'foo_value', 'bar': 'non_default'}
    assert_normalized(document, document.copy(), schema)


@mark.parametrize(
    'default', ({'default': 'bar_value'}, {'default_setter': must_not_be_called})
)
def test_default_none_nullable(default):
    bar_schema = {'type': 'string', 'nullable': True}
    bar_schema.update(default)
    schema = {'foo': {'type': 'string'}, 'bar': bar_schema}
    document = {'foo': 'foo_value', 'bar': None}
    assert_normalized(document, document.copy(), schema)


@mark.parametrize(
    'default', ({'default': 'bar_value'}, {'default_setter': lambda doc: 'bar_value'})
)
def test_default_none_nonnullable(default):
    bar_schema = {'type': 'string', 'nullable': False}
    bar_schema.update(default)
    schema = {'foo': {'type': 'string'}, 'bar': bar_schema}
    document = {'foo': 'foo_value', 'bar': None}
    expected = {'foo': 'foo_value', 'bar': 'bar_value'}
    assert_normalized(document, expected, schema)


def test_default_none_default_value():
    schema = {
        'foo': {'type': 'string'},
        'bar': {'type': 'string', 'nullable': True, 'default': None},
    }
    document = {'foo': 'foo_value'}
    expected = {'foo': 'foo_value', 'bar': None}
    assert_normalized(document, expected, schema)


@mark.parametrize(
    'default', ({'default': 'bar_value'}, {'default_setter': lambda doc: 'bar_value'})
)
def test_default_missing_in_subschema(default):
    bar_schema = {'type': 'string'}
    bar_schema.update(default)
    schema = {
        'thing': {
            'type': 'dict',
            'schema': {'foo': {'type': 'string'}, 'bar': bar_schema},
        }
    }
    document = {'thing': {'foo': 'foo_value'}}
    expected = {'thing': {'foo': 'foo_value', 'bar': 'bar_value'}}
    assert_normalized(document, expected, schema)


def test_depending_default_setters():
    schema = {
        'a': {'type': 'integer'},
        'b': {'type': 'integer', 'default_setter': lambda d: d['a'] + 1},
        'c': {'type': 'integer', 'default_setter': lambda d: d['b'] * 2},
        'd': {'type': 'integer', 'default_setter': lambda d: d['b'] + d['c']},
    }
    document = {'a': 1}
    expected = {'a': 1, 'b': 2, 'c': 4, 'd': 6}
    assert_normalized(document, expected, schema)


def test_circular_depending_default_setters(validator):
    schema = {
        'a': {'type': 'integer', 'default_setter': lambda d: d['b'] + 1},
        'b': {'type': 'integer', 'default_setter': lambda d: d['a'] + 1},
    }
    validator({}, schema)
    assert errors.SETTING_DEFAULT_FAILED in validator._errors


def test_issue_250():
    # https://github.com/pyeve/cerberus/issues/250
    schema = {
        'list': {
            'type': 'list',
            'schema': {
                'type': 'dict',
                'allow_unknown': True,
                'schema': {'a': {'type': 'string'}},
            },
        }
    }
    document = {'list': {'is_a': 'mapping'}}
    assert_fail(
        document,
        schema,
        error=('list', ('list', 'type'), errors.BAD_TYPE, schema['list']['type']),
    )


def test_issue_250_no_type_pass_on_list():
    # https://github.com/pyeve/cerberus/issues/250
    schema = {
        'list': {
            'schema': {
                'allow_unknown': True,
                'type': 'dict',
                'schema': {'a': {'type': 'string'}},
            }
        }
    }
    document = {'list': [{'a': 'known', 'b': 'unknown'}]}
    assert_normalized(document, document, schema)


def test_issue_250_no_type_fail_on_dict():
    # https://github.com/pyeve/cerberus/issues/250
    schema = {
        'list': {'schema': {'allow_unknown': True, 'schema': {'a': {'type': 'string'}}}}
    }
    document = {'list': {'a': {'a': 'known'}}}
    assert_fail(
        document,
        schema,
        error=(
            'list',
            ('list', 'schema'),
            errors.BAD_TYPE_FOR_SCHEMA,
            schema['list']['schema'],
        ),
    )


def test_issue_250_no_type_fail_pass_on_other():
    # https://github.com/pyeve/cerberus/issues/250
    schema = {
        'list': {'schema': {'allow_unknown': True, 'schema': {'a': {'type': 'string'}}}}
    }
    document = {'list': 1}
    assert_normalized(document, document, schema)


def test_allow_unknown_with_of_rules():
    # https://github.com/pyeve/cerberus/issues/251
    schema = {
        'test': {
            'oneof': [
                {
                    'type': 'dict',
                    'allow_unknown': True,
                    'schema': {'known': {'type': 'string'}},
                },
                {'type': 'dict', 'schema': {'known': {'type': 'string'}}},
            ]
        }
    }
    # check the regression, and that allow_unknown does not cause any
    # behaviour other than the expected one for oneof
    document = {'test': {'known': 's'}}
    assert_fail(
        document,
        schema,
        error=('test', ('test', 'oneof'), errors.ONEOF, schema['test']['oneof']),
    )
    # check that allow_unknown is actually applied
    document = {'test': {'known': 's', 'unknown': 'asd'}}
    assert_success(document, schema)


def test_271_normalising_tuples():
    # https://github.com/pyeve/cerberus/issues/271
    schema = {
        'my_field': {'type': 'list', 'schema': {'type': ('string', 'number', 'dict')}}
    }
    document = {'my_field': ('foo', 'bar', 42, 'albert', 'kandinsky', {'items': 23})}
    assert_success(document, schema)

    normalized = Validator(schema).normalized(document)
    assert normalized['my_field'] == (
        'foo',
        'bar',
        42,
        'albert',
        'kandinsky',
        {'items': 23},
    )


def test_allow_unknown_wo_schema():
    # https://github.com/pyeve/cerberus/issues/302
    v = Validator({'a': {'type': 'dict', 'allow_unknown': True}})
    v({'a': {}})


def test_allow_unknown_with_purge_unknown():
    validator = Validator(purge_unknown=True)
    schema = {'foo': {'type': 'dict', 'allow_unknown': True}}
    document = {'foo': {'bar': True}, 'bar': 'foo'}
    expected = {'foo': {'bar': True}}
    assert_normalized(document, expected, schema, validator)


def test_allow_unknown_with_purge_unknown_subdocument():
    validator = Validator(purge_unknown=True)
    schema = {
        'foo': {
            'type': 'dict',
            'schema': {'bar': {'type': 'string'}},
            'allow_unknown': True,
        }
    }
    document = {'foo': {'bar': 'baz', 'corge': False}, 'thud': 'xyzzy'}
    expected = {'foo': {'bar': 'baz', 'corge': False}}
    assert_normalized(document, expected, schema, validator)


def test_purge_readonly():
    schema = {
        'description': {'type': 'string', 'maxlength': 500},
        'last_updated': {'readonly': True},
    }
    validator = Validator(schema=schema, purge_readonly=True)
    document = {'description': 'it is a thing'}
    expected = deepcopy(document)
    document['last_updated'] = 'future'
    assert_normalized(document, expected, validator=validator)


def test_defaults_in_allow_unknown_schema():
    schema = {'meta': {'type': 'dict'}, 'version': {'type': 'string'}}
    allow_unknown = {
        'type': 'dict',
        'schema': {
            'cfg_path': {'type': 'string', 'default': 'cfg.yaml'},
            'package': {'type': 'string'},
        },
    }
    validator = Validator(schema=schema, allow_unknown=allow_unknown)

    document = {'version': '1.2.3', 'plugin_foo': {'package': 'foo'}}
    expected = {
        'version': '1.2.3',
        'plugin_foo': {'package': 'foo', 'cfg_path': 'cfg.yaml'},
    }
    assert_normalized(document, expected, schema, validator)
84
Lib/site-packages/pipenv/vendor/cerberus/tests/test_registries.py
vendored
Normal file
84
Lib/site-packages/pipenv/vendor/cerberus/tests/test_registries.py
vendored
Normal file
@@ -0,0 +1,84 @@
# -*- coding: utf-8 -*-

from cerberus import schema_registry, rules_set_registry, Validator
from cerberus.tests import (
    assert_fail,
    assert_normalized,
    assert_schema_error,
    assert_success,
)


def test_schema_registry_simple():
    schema_registry.add('foo', {'bar': {'type': 'string'}})
    schema = {'a': {'schema': 'foo'}, 'b': {'schema': 'foo'}}
    document = {'a': {'bar': 'a'}, 'b': {'bar': 'b'}}
    assert_success(document, schema)


def test_top_level_reference():
    schema_registry.add('peng', {'foo': {'type': 'integer'}})
    document = {'foo': 42}
    assert_success(document, 'peng')


def test_rules_set_simple():
    rules_set_registry.add('foo', {'type': 'integer'})
    assert_success({'bar': 1}, {'bar': 'foo'})
    assert_fail({'bar': 'one'}, {'bar': 'foo'})


def test_allow_unknown_as_reference():
    rules_set_registry.add('foo', {'type': 'number'})
    v = Validator(allow_unknown='foo')
    assert_success({0: 1}, {}, v)
    assert_fail({0: 'one'}, {}, v)


def test_recursion():
    rules_set_registry.add('self', {'type': 'dict', 'allow_unknown': 'self'})
    v = Validator(allow_unknown='self')
    assert_success({0: {1: {2: {}}}}, {}, v)


def test_references_remain_unresolved(validator):
    rules_set_registry.extend(
        (('boolean', {'type': 'boolean'}), ('booleans', {'valuesrules': 'boolean'}))
    )
    validator.schema = {'foo': 'booleans'}
    assert 'booleans' == validator.schema['foo']
    assert 'boolean' == rules_set_registry._storage['booleans']['valuesrules']


def test_rules_registry_with_anyof_type():
    rules_set_registry.add('string_or_integer', {'anyof_type': ['string', 'integer']})
    schema = {'soi': 'string_or_integer'}
    assert_success({'soi': 'hello'}, schema)


def test_schema_registry_with_anyof_type():
    schema_registry.add('soi_id', {'id': {'anyof_type': ['string', 'integer']}})
    schema = {'soi': {'schema': 'soi_id'}}
    assert_success({'soi': {'id': 'hello'}}, schema)


def test_normalization_with_rules_set():
    # https://github.com/pyeve/cerberus/issues/283
    rules_set_registry.add('foo', {'default': 42})
    assert_normalized({}, {'bar': 42}, {'bar': 'foo'})
    rules_set_registry.add('foo', {'default_setter': lambda _: 42})
    assert_normalized({}, {'bar': 42}, {'bar': 'foo'})
    rules_set_registry.add('foo', {'type': 'integer', 'nullable': True})
    assert_success({'bar': None}, {'bar': 'foo'})


def test_rules_set_with_dict_field():
    document = {'a_dict': {'foo': 1}}
    schema = {'a_dict': {'type': 'dict', 'schema': {'foo': 'rule'}}}

    # the rules set is not yet added to the registry, so test the faulty one first
    rules_set_registry.add('rule', {'tüpe': 'integer'})
    assert_schema_error(document, schema)

    rules_set_registry.add('rule', {'type': 'integer'})
    assert_success(document, schema)
174
Lib/site-packages/pipenv/vendor/cerberus/tests/test_schema.py
vendored
Normal file
174
Lib/site-packages/pipenv/vendor/cerberus/tests/test_schema.py
vendored
Normal file
@@ -0,0 +1,174 @@
# -*- coding: utf-8 -*-

import re

import pytest

from cerberus import Validator, errors, SchemaError
from cerberus.schema import UnvalidatedSchema
from cerberus.tests import assert_schema_error


def test_empty_schema():
    validator = Validator()
    with pytest.raises(SchemaError, match=errors.SCHEMA_ERROR_MISSING):
        validator({}, schema=None)


def test_bad_schema_type(validator):
    schema = "this string should really be dict"
    msg = errors.SCHEMA_ERROR_DEFINITION_TYPE.format(schema)
    with pytest.raises(SchemaError, match=msg):
        validator.schema = schema


def test_bad_schema_type_field(validator):
    field = 'foo'
    schema = {field: {'schema': {'bar': {'type': 'strong'}}}}
    with pytest.raises(SchemaError):
        validator.schema = schema


def test_unknown_rule(validator):
    msg = "{'foo': [{'unknown': ['unknown rule']}]}"
    with pytest.raises(SchemaError, match=re.escape(msg)):
        validator.schema = {'foo': {'unknown': 'rule'}}


def test_unknown_type(validator):
    msg = str({'foo': [{'type': ['Unsupported types: unknown']}]})
    with pytest.raises(SchemaError, match=re.escape(msg)):
        validator.schema = {'foo': {'type': 'unknown'}}


def test_bad_schema_definition(validator):
    field = 'name'
    msg = str({field: ['must be of dict type']})
    with pytest.raises(SchemaError, match=re.escape(msg)):
        validator.schema = {field: 'this should really be a dict'}


def test_bad_of_rules():
    schema = {'foo': {'anyof': {'type': 'string'}}}
    assert_schema_error({}, schema)


def test_normalization_rules_are_invalid_in_of_rules():
    schema = {0: {'anyof': [{'coerce': lambda x: x}]}}
    assert_schema_error({}, schema)


def test_anyof_allof_schema_validate():
    # make sure schemas with 'anyof' and 'allof' constraints are checked
    # correctly
    schema = {
        'doc': {'type': 'dict', 'anyof': [{'schema': [{'param': {'type': 'number'}}]}]}
    }
    assert_schema_error({'doc': 'this is my document'}, schema)

    schema = {
        'doc': {'type': 'dict', 'allof': [{'schema': [{'param': {'type': 'number'}}]}]}
    }
    assert_schema_error({'doc': 'this is my document'}, schema)


def test_repr():
    v = Validator({'foo': {'type': 'string'}})
    assert repr(v.schema) == "{'foo': {'type': 'string'}}"


def test_validated_schema_cache():
    v = Validator({'foozifix': {'coerce': int}})
    cache_size = len(v._valid_schemas)

    v = Validator({'foozifix': {'type': 'integer'}})
    cache_size += 1
    assert len(v._valid_schemas) == cache_size

    v = Validator({'foozifix': {'coerce': int}})
    assert len(v._valid_schemas) == cache_size

    max_cache_size = 161
    assert cache_size <= max_cache_size, (
"There's an unexpected high amount (%s) of cached valid "
|
||||
"definition schemas. Unless you added further tests, "
|
||||
"there are good chances that something is wrong. "
|
||||
"If you added tests with new schemas, you can try to "
|
||||
"adjust the variable `max_cache_size` according to "
|
||||
"the added schemas." % cache_size
|
||||
)
|
||||
|
||||
|
||||
def test_expansion_in_nested_schema():
    schema = {'detroit': {'schema': {'anyof_regex': ['^Aladdin', 'Sane$']}}}
    v = Validator(schema)
    assert v.schema['detroit']['schema'] == {
        'anyof': [{'regex': '^Aladdin'}, {'regex': 'Sane$'}]
    }


def test_unvalidated_schema_can_be_copied():
    schema = UnvalidatedSchema()
    schema_copy = schema.copy()
    assert schema_copy == schema


# TODO remove with next major release
def test_deprecated_rule_names_in_valueschema():
    def check_with(field, value, error):
        pass

    schema = {
        "field_1": {
            "type": "dict",
            "valueschema": {
                "type": "dict",
                "keyschema": {"type": "string"},
                "valueschema": {"type": "string"},
            },
        },
        "field_2": {
            "type": "list",
            "items": [
                {"keyschema": {}},
                {"validator": check_with},
                {"valueschema": {}},
            ],
        },
    }

    validator = Validator(schema)

    assert validator.schema == {
        "field_1": {
            "type": "dict",
            "valuesrules": {
                "type": "dict",
                "keysrules": {"type": "string"},
                "valuesrules": {"type": "string"},
            },
        },
        "field_2": {
            "type": "list",
            "items": [
                {"keysrules": {}},
                {"check_with": check_with},
                {"valuesrules": {}},
            ],
        },
    }


def test_anyof_check_with():
    def foo(field, value, error):
        pass

    def bar(field, value, error):
        pass

    schema = {'field': {'anyof_check_with': [foo, bar]}}
    validator = Validator(schema)

    assert validator.schema == {
        'field': {'anyof': [{'check_with': foo}, {'check_with': bar}]}
    }
11
Lib/site-packages/pipenv/vendor/cerberus/tests/test_utils.py
vendored
Normal file
11
Lib/site-packages/pipenv/vendor/cerberus/tests/test_utils.py
vendored
Normal file
@@ -0,0 +1,11 @@
from cerberus.utils import compare_paths_lt


def test_compare_paths():
    lesser = ('a_dict', 'keysrules')
    greater = ('a_dict', 'valuesrules')
    assert compare_paths_lt(lesser, greater)

    lesser += ('type',)
    greater += ('regex',)
    assert compare_paths_lt(lesser, greater)
1948
Lib/site-packages/pipenv/vendor/cerberus/tests/test_validation.py
vendored
Normal file
1948
Lib/site-packages/pipenv/vendor/cerberus/tests/test_validation.py
vendored
Normal file
File diff suppressed because it is too large
130
Lib/site-packages/pipenv/vendor/cerberus/utils.py
vendored
Normal file
130
Lib/site-packages/pipenv/vendor/cerberus/utils.py
vendored
Normal file
@@ -0,0 +1,130 @@
from __future__ import absolute_import

from collections import namedtuple

from cerberus.platform import _int_types, _str_type, Mapping, Sequence, Set


TypeDefinition = namedtuple('TypeDefinition', 'name,included_types,excluded_types')
"""
This class is used to define types that can be used as value in the
:attr:`~cerberus.Validator.types_mapping` property.
The ``name`` should be descriptive and match the key it is going to be assigned
to.
A value that is validated against such definition must be an instance of any of
the types contained in ``included_types`` and must not match any of the types
contained in ``excluded_types``.
"""
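

# A minimal sketch of how such a definition is meant to be plugged into a
# Validator, per the docstring above; the 'decimal' type name and the Decimal
# class are illustrative assumptions, not part of this module:
#
#     from decimal import Decimal
#     from cerberus import Validator
#
#     Validator.types_mapping['decimal'] = TypeDefinition('decimal', (Decimal,), ())
#     Validator({'price': {'type': 'decimal'}}).validate({'price': Decimal('1.5')})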


def compare_paths_lt(x, y):
    min_length = min(len(x), len(y))

    if x[:min_length] == y[:min_length]:
        return len(x) == min_length

    for i in range(min_length):
        a, b = x[i], y[i]

        for _type in (_int_types, _str_type, tuple):
            if isinstance(a, _type):
                if isinstance(b, _type):
                    break
                else:
                    return True

        if a == b:
            continue
        elif a < b:
            return True
        else:
            return False

    raise RuntimeError


def drop_item_from_tuple(t, i):
    return t[:i] + t[i + 1 :]


def get_Validator_class():
    global Validator
    if 'Validator' not in globals():
        from cerberus.validator import Validator
    return Validator


def mapping_hash(schema):
    return hash(mapping_to_frozenset(schema))


def mapping_to_frozenset(mapping):
""" Be aware that this treats any sequence type with the equal members as
|
||||
equal. As it is used to identify equality of schemas, this can be
|
||||
considered okay as definitions are semantically equal regardless the
|
||||
container type. """
|
||||
|
||||
    aggregation = {}

    for key, value in mapping.items():
        if isinstance(value, Mapping):
            aggregation[key] = mapping_to_frozenset(value)
        elif isinstance(value, Sequence):
            value = list(value)
            for i, item in enumerate(value):
                if isinstance(item, Mapping):
                    value[i] = mapping_to_frozenset(item)
            aggregation[key] = tuple(value)
        elif isinstance(value, Set):
            aggregation[key] = frozenset(value)
        else:
            aggregation[key] = value

    return frozenset(aggregation.items())
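

# A small illustration of the docstring's point (an assumed example, not part
# of the upstream module): schemas differing only in the sequence container
# used for a value hash identically, because sequences are normalized to
# tuples above.
#
#     assert mapping_to_frozenset({'a': {'type': ['string']}}) == \
#         mapping_to_frozenset({'a': {'type': ('string',)}})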


def quote_string(value):
    if isinstance(value, _str_type):
        return '"%s"' % value
    else:
        return value


class readonly_classproperty(property):
    def __get__(self, instance, owner):
        return super(readonly_classproperty, self).__get__(owner)

    def __set__(self, instance, value):
        raise RuntimeError('This is a readonly class property.')

    def __delete__(self, instance):
        raise RuntimeError('This is a readonly class property.')


def validator_factory(name, bases=None, namespace={}):
    """ Dynamically create a :class:`~cerberus.Validator` subclass.
        Docstrings of mixin-classes will be added to the resulting
        class' one if ``__doc__`` is not in :obj:`namespace`.

    :param name: The name of the new class.
    :type name: :class:`str`
    :param bases: Class(es) with additional and overriding attributes.
    :type bases: :class:`tuple` of or a single :term:`class`
    :param namespace: Attributes for the new class.
    :type namespace: :class:`dict`
    :return: The created class.
    """
    Validator = get_Validator_class()

    if bases is None:
        bases = (Validator,)
    elif isinstance(bases, tuple):
        bases += (Validator,)
    else:
        bases = (bases, Validator)

    docstrings = [x.__doc__ for x in bases if x.__doc__]
    if len(docstrings) > 1 and '__doc__' not in namespace:
        namespace.update({'__doc__': '\n'.join(docstrings)})

    return type(name, bases, namespace)
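

# Usage sketch, per the docstring above (the class name and schema are assumed
# for illustration):
#
#     MyValidator = validator_factory('MyValidator')
#     v = MyValidator({'foo': {'type': 'string'}})
#     assert v.validate({'foo': 'bar'})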
1615
Lib/site-packages/pipenv/vendor/cerberus/validator.py
vendored
Normal file
1615
Lib/site-packages/pipenv/vendor/cerberus/validator.py
vendored
Normal file
File diff suppressed because it is too large
21
Lib/site-packages/pipenv/vendor/certifi/LICENSE
vendored
Normal file
21
Lib/site-packages/pipenv/vendor/certifi/LICENSE
vendored
Normal file
@@ -0,0 +1,21 @@
This package contains a modified version of ca-bundle.crt:

ca-bundle.crt -- Bundle of CA Root Certificates

Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011#
This is a bundle of X.509 certificates of public Certificate Authorities
(CA). These were automatically extracted from Mozilla's root certificates
file (certdata.txt). This file can be found in the mozilla source tree:
http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1#
It contains the certificates in PEM format and therefore
can be directly used with curl / libcurl / php_curl, or with
an Apache+mod_ssl webserver for SSL client authentication.
Just configure this file as the SSLCACertificateFile.#

***** BEGIN LICENSE BLOCK *****
This Source Code Form is subject to the terms of the Mozilla Public License,
v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
one at http://mozilla.org/MPL/2.0/.

***** END LICENSE BLOCK *****
@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
3
Lib/site-packages/pipenv/vendor/certifi/__init__.py
vendored
Normal file
3
Lib/site-packages/pipenv/vendor/certifi/__init__.py
vendored
Normal file
@@ -0,0 +1,3 @@
from .core import contents, where

__version__ = "2020.04.05.1"
12
Lib/site-packages/pipenv/vendor/certifi/__main__.py
vendored
Normal file
12
Lib/site-packages/pipenv/vendor/certifi/__main__.py
vendored
Normal file
@@ -0,0 +1,12 @@
import argparse

from certifi import contents, where

parser = argparse.ArgumentParser()
parser.add_argument("-c", "--contents", action="store_true")
args = parser.parse_args()

if args.contents:
    print(contents())
else:
    print(where())
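

# Usage sketch for the CLI above: `python -m certifi` prints the path of the
# bundled CA certificate file, while `python -m certifi -c` prints its contents.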
BIN
Lib/site-packages/pipenv/vendor/certifi/__pycache__/__init__.cpython-38.pyc
vendored
Normal file
BIN
Lib/site-packages/pipenv/vendor/certifi/__pycache__/__init__.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/certifi/__pycache__/__main__.cpython-38.pyc
vendored
Normal file
BIN
Lib/site-packages/pipenv/vendor/certifi/__pycache__/__main__.cpython-38.pyc
vendored
Normal file
Binary file not shown.
BIN
Lib/site-packages/pipenv/vendor/certifi/__pycache__/core.cpython-38.pyc
vendored
Normal file
BIN
Lib/site-packages/pipenv/vendor/certifi/__pycache__/core.cpython-38.pyc
vendored
Normal file
Binary file not shown.
Some files were not shown because too many files have changed in this diff